pax_global_header 0000666 0000000 0000000 00000000064 14143421273 0014513 g ustar 00root root 0000000 0000000 52 comment=9810bb1e5bac844d4692176d8c5ea7d449881b7f
nbclient-0.5.6/ 0000775 0000000 0000000 00000000000 14143421273 0013321 5 ustar 00root root 0000000 0000000 nbclient-0.5.6/.bumpversion.cfg 0000664 0000000 0000000 00000000201 14143421273 0016422 0 ustar 00root root 0000000 0000000 [bumpversion]
current_version = 0.5.6
commit = True
tag = True
tag_name = {new_version}
[bumpversion:file:nbclient/_version.py]
nbclient-0.5.6/.circleci/ 0000775 0000000 0000000 00000000000 14143421273 0015154 5 ustar 00root root 0000000 0000000 nbclient-0.5.6/.circleci/config.yml 0000664 0000000 0000000 00000001304 14143421273 0017142 0 ustar 00root root 0000000 0000000 version: 2
jobs:
build_docs:
docker:
- image: circleci/python:3.6-stretch
steps:
# Get our data and merge with upstream
- run: sudo apt-get update
- checkout
- restore_cache:
keys:
- cache-pip
- run: |
pip install --user .[sphinx]
- save_cache:
key: cache-pip
paths:
- ~/.cache/pip
# Build the docs
- run:
name: Build docs to store
command: |
sphinx-build -W -b html docs/ docs/_build/html
- store_artifacts:
path: docs/_build/html/
destination: html
workflows:
version: 2
default:
jobs:
- build_docs
nbclient-0.5.6/.github/ 0000775 0000000 0000000 00000000000 14143421273 0014661 5 ustar 00root root 0000000 0000000 nbclient-0.5.6/.github/workflows/ 0000775 0000000 0000000 00000000000 14143421273 0016716 5 ustar 00root root 0000000 0000000 nbclient-0.5.6/.github/workflows/main.yml 0000664 0000000 0000000 00000002731 14143421273 0020370 0 ustar 00root root 0000000 0000000 name: CI
on:
push:
branches: '*'
pull_request:
branches: '*'
jobs:
pre-commit:
name: Run pre-commit formatters and linters
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.8
uses: actions/setup-python@v1
with:
python-version: 3.8
- uses: pre-commit/action@v2.0.0
build-n-test-n-coverage:
name: Build, test and code coverage
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
python-version: [ 3.6, 3.7, 3.8, 3.9 ]
exclude:
- os: windows-latest
python-version: 3.8
- os: windows-latest
python-version: 3.9
env:
OS: ${{ matrix.os }}
PYTHON: '3.9'
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v1
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -e .[test]
pip install tox coverage codecov tox-gh-actions
- name: Run the tests
run: tox
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v1
with:
file: ./coverage.xml
flags: unittests
env_vars: OS,PYTHON
name: codecov-umbrella
fail_ci_if_error: false
nbclient-0.5.6/.gitignore 0000664 0000000 0000000 00000003470 14143421273 0015315 0 ustar 00root root 0000000 0000000 # Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
Pipfile
Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# Pycharm stuff
.idea/
# VSCode
.vscode
nbclient-0.5.6/.pre-commit-config.yaml 0000664 0000000 0000000 00000001423 14143421273 0017602 0 ustar 00root root 0000000 0000000 ci:
autoupdate_schedule: monthly
autofix_prs: true
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.0.1
hooks:
- id: check-yaml
- id: end-of-file-fixer
- id: trailing-whitespace
# - id: check-json
- repo: https://github.com/mgedmin/check-manifest
rev: "0.46"
hooks:
- id: check-manifest
additional_dependencies: [setuptools>=46.4.0]
- repo: https://github.com/pycqa/isort
rev: 5.9.3
hooks:
- id: isort
- repo: https://github.com/asottile/pyupgrade
rev: v2.25.0
hooks:
- id: pyupgrade
args: [--py36-plus]
- repo: https://github.com/psf/black
rev: 21.7b0
hooks:
- id: black
- repo: https://gitlab.com/pycqa/flake8
rev: 3.9.1
hooks:
- id: flake8
nbclient-0.5.6/.readthedocs.yml 0000664 0000000 0000000 00000000524 14143421273 0016410 0 ustar 00root root 0000000 0000000 # .readthedocs.yml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
# Required
version: 2
sphinx:
configuration: docs/conf.py
formats: all
build:
image: latest
python:
version: 3.7
install:
- method: pip
path: .
extra_requirements:
- sphinx
nbclient-0.5.6/CHANGELOG.md 0000664 0000000 0000000 00000000174 14143421273 0015134 0 ustar 00root root 0000000 0000000 # Change Log
See the Change Log in the [nbclient documentation](https://nbclient.readthedocs.io/en/latest/changelog.html).
nbclient-0.5.6/CONTRIBUTING.md 0000664 0000000 0000000 00000002375 14143421273 0015561 0 ustar 00root root 0000000 0000000 # Contributing
We follow the [Jupyter Contribution Workflow](https://jupyter.readthedocs.io/en/latest/contributing/content-contributor.html) and the [IPython Contributing Guide](https://github.com/ipython/ipython/blob/master/CONTRIBUTING.md).
## Code formatting
Use the [pre-commit](https://pre-commit.com/) tool to format and lint the codebase:
```console
# to apply to only staged files
$ pre-commit run
# to run against all files
$ pre-commit run --all-files
# to install so that it is run before commits
$ pre-commit install
```
## Testing
In your environment `pip install -e '.[test]'` will be needed to be able to
run all of the tests.
The recommended way to do this is using [tox](https://tox.readthedocs.io/en/latest/):
```console
# to list all environments
$ tox -av
# to run all tests for a specific environment
$ tox -e py38
```
## Documentation
NbClient needs some PRs to copy over documentation!
## Releasing
If you are going to release a version of `nbclient` you should also be capable
of testing it and building the docs.
Please follow the instructions in [Testing](#testing) and [Documentation](#documentation) if
you are unfamiliar with how to do so.
The rest of the release process can be found in [these release instructions](./RELEASING.md).
nbclient-0.5.6/LICENSE 0000664 0000000 0000000 00000005424 14143421273 0014333 0 ustar 00root root 0000000 0000000 # Licensing terms
This project is licensed under the terms of the Modified BSD License
(also known as New or Revised or 3-Clause BSD), as follows:
- Copyright (c) 2020-, Jupyter Development Team
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
Neither the name of the Jupyter Development Team nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
## About the Jupyter Development Team
The Jupyter Development Team is the set of all contributors to the Jupyter project.
This includes all of the Jupyter subprojects.
The core team that coordinates development on GitHub can be found here:
https://github.com/jupyter/.
## Our Copyright Policy
Jupyter uses a shared copyright model. Each contributor maintains copyright
over their contributions to Jupyter. But, it is important to note that these
contributions are typically only changes to the repositories. Thus, the Jupyter
source code, in its entirety is not the copyright of any single person or
institution. Instead, it is the collective copyright of the entire Jupyter
Development Team. If individual contributors want to maintain a record of what
changes/contributions they have specific copyright on, they should indicate
their copyright in the commit message of the change, when they commit the
change to one of the Jupyter repositories.
With this in mind, the following banner should be used in any source code file
to indicate the copyright and license terms:
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
nbclient-0.5.6/MANIFEST.in 0000664 0000000 0000000 00000001475 14143421273 0015066 0 ustar 00root root 0000000 0000000 include LICENSE
include MANIFEST.in
include requirements.txt
include requirements-dev.txt
include *.md
include .bumpversion.cfg
include tox.ini
include pyproject.toml
include .pre-commit-config.yaml
# Code and test files
recursive-include nbclient *.ipynb
recursive-include nbclient *.png
recursive-include nbclient *.py
# Documentation
graft docs
exclude docs/\#*
exclude .readthedocs.yml
exclude codecov.yml
# Examples
graft examples
# docs subdirs we want to skip
prune docs/build
prune docs/gh-pages
prune docs/dist
# Patterns to exclude from any directory
global-exclude *~
global-exclude *.pyc
global-exclude *.pyo
global-exclude .git
global-exclude .ipynb_checkpoints
# Binder files to be excluded
exclude binder
recursive-exclude binder *.ipynb
recursive-exclude binder *.yml
# Exclude CI/CD files
prune .circleci
nbclient-0.5.6/README.md 0000664 0000000 0000000 00000005224 14143421273 0014603 0 ustar 00root root 0000000 0000000 [](https://mybinder.org/v2/gh/jupyter/nbclient/master?filepath=binder%2Frun_nbclient.ipynb)
[](https://github.com/jupyter/nbclient/actions)
[](https://nbclient.readthedocs.io/en/latest/?badge=latest)
[](https://codecov.io/github/jupyter/nbclient?branch=master)
[](https://www.python.org/downloads/release/python-360/)
[](https://www.python.org/downloads/release/python-370/)
[](https://www.python.org/downloads/release/python-380/)
[](https://www.python.org/downloads/release/python-390/)
[](https://github.com/ambv/black)
# nbclient
**NBClient** lets you **execute** notebooks.
A client library for programmatic notebook execution, **NBClient** is a tool for running Jupyter Notebooks in
different execution contexts, including the command line.
## Interactive Demo
To demo **NBClient** interactively, click this Binder badge to start the demo:
[](https://mybinder.org/v2/gh/jupyter/nbclient/master?filepath=binder%2Frun_nbclient.ipynb)
## Installation
In a terminal, run:
python3 -m pip install nbclient
## Documentation
See [ReadTheDocs](https://nbclient.readthedocs.io/en/latest/) for more in-depth details about the project and the
[API Reference](https://nbclient.readthedocs.io/en/latest/reference/index.html).
## Python Version Support
This library currently supports Python 3.6+ versions. As minor Python
versions are officially sunset by the Python org, nbclient will similarly
drop support in the future.
## Origins
This library used to be part of the [nbconvert](https://nbconvert.readthedocs.io/en/latest/) project. NBClient extracted nbconvert's `ExecutePreprocessor`into its own library for easier updating and importing by downstream libraries and applications.
## Relationship to JupyterClient
NBClient and JupyterClient are distinct projects.
`jupyter_client` is a client library for the jupyter protocol. Specifically, `jupyter_client` provides the Python API
for starting, managing and communicating with Jupyter kernels.
While, nbclient allows notebooks to be run in different execution contexts.
nbclient-0.5.6/RELEASING.md 0000664 0000000 0000000 00000000752 14143421273 0015160 0 ustar 00root root 0000000 0000000 # Releasing
## Prerequisites
- First check that the CHANGELOG (see `docs/changelog.md`) is up to date for the next release version
- Ensure dev requirements are installed `pip install -r requirements-dev.txt`
## Push to GitHub
Change from patch to minor or major for appropriate version updates.
```bash
bumpversion patch
git push upstream && git push upstream --tags
```
## Push to PyPI
```bash
rm -rf dist/*
rm -rf build/*
python setup.py sdist bdist_wheel
twine upload dist/*
```
nbclient-0.5.6/binder/ 0000775 0000000 0000000 00000000000 14143421273 0014564 5 ustar 00root root 0000000 0000000 nbclient-0.5.6/binder/empty_notebook.ipynb 0000664 0000000 0000000 00000003465 14143421273 0020675 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Show a pandas dataframe"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import pandas as pd\n",
"import numpy as np\n",
"import scrapbook as sb"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"data = pd.DataFrame(np.random.randn(20, 2), columns=['a', 'b'])\n",
"data"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Use scrapbook to store this data in the notebook\n",
"sb.glue('dataframe', data.to_dict())"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Make a matplotlib plot"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import matplotlib.pyplot as plt"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Make and display a plot\n",
"fig, ax = plt.subplots()\n",
"ax.scatter(data['a'], data['b'])\n",
"sb.glue('plot', fig, 'display')"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.3"
},
"widgets": {
"application/vnd.jupyter.widget-state+json": {
"state": {},
"version_major": 2,
"version_minor": 0
}
}
},
"nbformat": 4,
"nbformat_minor": 4
}
nbclient-0.5.6/binder/environment.yml 0000664 0000000 0000000 00000000213 14143421273 0017647 0 ustar 00root root 0000000 0000000 name: nbclient
channels:
- conda-forge
dependencies:
- numpy
- pandas
- matplotlib
- nteract-scrapbook
- nbformat
- nbclient
nbclient-0.5.6/binder/run_nbclient.ipynb 0000664 0000000 0000000 00000005373 14143421273 0020321 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import nbclient\n",
"import nbformat as nbf\n",
"import pandas as pd\n",
"import scrapbook as sb"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Background\n",
"\n",
"This notebook uses `nbclient` to read and execute an *empty* notebook.\n",
"The empty notebook generates some fake data, makes a plot, and stores\n",
"both the data and the plot inside the notebook using the\n",
"[scrapbook package](https://github.com/nteract/scrapbook). We will\n",
"then be able to access the generated contents of the notebook here.\n",
"\n",
"You can see the empty notebook by clicking this button:\n",
"\n",
""
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Read and execute the empty notebook"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# We use nbformat to represent our empty notebook in-memory\n",
"nb = nbf.read('./empty_notebook.ipynb', nbf.NO_CONVERT)\n",
"\n",
"# Execute our in-memory notebook, which will now have outputs\n",
"nb = nbclient.execute(nb)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Inspect the new notebook for its contents"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# First we'll convert our nbformat NotebokNote into a *scrapbook* NotebookNode\n",
"nb = sb.read_notebook(nb)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# We can access the dataframe that was created and glued into the empty notebook\n",
"pd.DataFrame.from_dict(nb.scraps.get('dataframe').data).head()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# We can also access the generated plot by \"re-gluing\" the notebook here\n",
"nb.reglue('plot')"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.3"
},
"widgets": {
"application/vnd.jupyter.widget-state+json": {
"state": {},
"version_major": 2,
"version_minor": 0
}
}
},
"nbformat": 4,
"nbformat_minor": 4
}
nbclient-0.5.6/docs/ 0000775 0000000 0000000 00000000000 14143421273 0014251 5 ustar 00root root 0000000 0000000 nbclient-0.5.6/docs/Makefile 0000664 0000000 0000000 00000001136 14143421273 0015712 0 ustar 00root root 0000000 0000000 # Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
SPHINXPROJ = nbclient
SOURCEDIR = .
BUILDDIR = _build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
nbclient-0.5.6/docs/UPDATE.md 0000664 0000000 0000000 00000000154 14143421273 0015555 0 ustar 00root root 0000000 0000000 TODO: Figure out make options needed for non-api changes
```
sphinx-apidoc -f -o reference ../nbclient
```
nbclient-0.5.6/docs/_static/ 0000775 0000000 0000000 00000000000 14143421273 0015677 5 ustar 00root root 0000000 0000000 nbclient-0.5.6/docs/_static/custom.css 0000664 0000000 0000000 00000000704 14143421273 0017724 0 ustar 00root root 0000000 0000000 img.logo {
width:100%
}
.right-next {
float: right;
max-width: 45%;
overflow: auto;
text-overflow: ellipsis;
white-space: nowrap;
}
.right-next::after{
content: ' »';
}
.left-prev {
float: left;
max-width: 45%;
overflow: auto;
text-overflow: ellipsis;
white-space: nowrap;
}
.left-prev::before{
content: '« ';
}
.prev-next-bottom {
margin-top: 3em;
}
.prev-next-top {
margin-bottom: 1em;
}
nbclient-0.5.6/docs/changelog.md 0000664 0000000 0000000 00000016765 14143421273 0016541 0 ustar 00root root 0000000 0000000 # Changelog
## 0.5.6
- Changed `jupyter execute` to `jupyter run` [#173](https://github.com/jupyter/nbclient/pull/173) ([@palewire](https://github.com/palewire))
- Move IPYKERNEL_CELL_NAME from tox to pytest [#172](https://github.com/jupyter/nbclient/pull/172) ([@frenzymadness](https://github.com/frenzymadness))
## 0.5.5
- Added CLI to README [#170](https://github.com/jupyter/nbclient/pull/170) ([@palewire](https://github.com/palewire))
- Add "jupyter execute" command-line interface [#165](https://github.com/jupyter/nbclient/pull/165) ([@palewire](https://github.com/palewire))
- Fix: updating buffers overwrote previous buffers [#169](https://github.com/jupyter/nbclient/pull/169) ([@maartenbreddels](https://github.com/maartenbreddels))
- Fix tests for ipykernel without debugpy [#166](https://github.com/jupyter/nbclient/pull/166) ([@frenzymadness](https://github.com/frenzymadness))
- gitignore Pipfile [#164](https://github.com/jupyter/nbclient/pull/164) ([@palewire](https://github.com/palewire))
- Fixed CONTRIBUTING.md link [#163](https://github.com/jupyter/nbclient/pull/163) ([@palewire](https://github.com/palewire))
- Fix typo [#162](https://github.com/jupyter/nbclient/pull/162) ([@The-Compiler](https://github.com/The-Compiler))
- Move format & lint to pre-commit [#161](https://github.com/jupyter/nbclient/pull/161) ([@chrisjsewell](https://github.com/chrisjsewell))
- Add `skip-execution` cell tag functionality [#151](https://github.com/jupyter/nbclient/pull/151) ([@chrisjsewell](https://github.com/chrisjsewell))
## 0.5.4
- Replace `km.cleanup` with `km.cleanup_resources` [#152](https://github.com/jupyter/nbclient/pull/152) ([@davidbrochart](https://github.com/davidbrochart))
- Use async generator backport only on old python [#154](https://github.com/jupyter/nbclient/pull/154) ([@mkoeppe](https://github.com/mkoeppe))
- Support parsing of IPython dev version [#150](https://github.com/jupyter/nbclient/pull/150) ([@cphyc](https://github.com/cphyc))
- Set `IPYKERNEL_CELL_NAME = ` [#147](https://github.com/jupyter/nbclient/pull/147) ([@davidbrochart](https://github.com/davidbrochart))
- Print useful error message on exception [#142](https://github.com/jupyter/nbclient/pull/142) ([@certik](https://github.com/certik))
## 0.5.3
- Fix ipykernel's `stop_on_error` value to take into account `raises-exception` tag and `force_raise_errors` [#137](https://github.com/jupyter/nbclient/pull/137)
## 0.5.2
- Set minimum python version supported to 3.6.1 to avoid 3.6.0 issues
- CellExecutionError is now unpickleable
- Added testing for python 3.9
- Changed travis tests to github actions
- Documentation referencing an old model instead of NotebookClient was fixed
- `allow_error_names` option was added for a more specific scope of `allow_errors` to be applied
## 0.5.1
- Update kernel client class JIT if it's the synchronous version
- Several documentation fixes / improvements
## 0.5.0
- Move `language_info` retrieval before cell execution [#102](https://github.com/jupyter/nbclient/pull/102)
- HistoryManager setting for ipython kernels no longer applies twice (fix for 5.0 trailets release)
- Improved error handling around language_info missing
- `(async_)start_new_kernel_client` is now split into `(async_)start_new_kernel` and `(async_)start_new_kernel_client`
## 0.4.2 - 0.4.3
These patch releases were removed due to backwards incompatible changes that should have been a minor release.
If you were using these versions for the couple days they were up, move to 0.5.0 and you shouldn't have any issues.
## 0.4.1
- Python type hinting added to most interfaces! [#83](https://github.com/jupyter/nbclient/pull/83)
- Several documentation fixes and improvements were made [#86](https://github.com/jupyter/nbclient/pull/86)
- An asynchronous heart beat check was added to correctly raise a DeadKernelError when kernels die unexpectantly [#90](https://github.com/jupyter/nbclient/pull/90)
## 0.4.0
### Major Changes
- Use KernelManager's graceful shutdown rather than KILLing kernels [#64](https://github.com/jupyter/nbclient/pull/64)
- Mimic an Output widget at the frontend so that the Output widget behaves correctly [#68](https://github.com/jupyter/nbclient/pull/68)
- Nested asyncio is automatic, and works with Tornado [#71](https://github.com/jupyter/nbclient/pull/71)
- `async_execute` now has a `reset_kc` argument to control if the client is reset upon execution request [#53](https://github.com/jupyter/nbclient/pull/53)
### Fixes
- Fix `OSError: [WinError 6] The handle is invalid` for windows/python<3.7 [#77](https://github.com/jupyter/nbclient/pull/77)
- Async wapper Exceptions no longer loose thier caused exception information [#65](https://github.com/jupyter/nbclient/pull/65)
- `extra_arguments` are now configurable by config settings [#66](https://github.com/jupyter/nbclient/pull/66)
### Operational
- Cross-OS testing now run on PRs via Github Actions [#63](https://github.com/jupyter/nbclient/pull/63)
## 0.3.1
### Fixes
- Check that a kernel manager exists before cleaning up the kernel [#61](https://github.com/jupyter/nbclient/pull/61)
- Force client class to be async when kernel manager is MultiKernelManager [#55](https://github.com/jupyter/nbclient/pull/55)
- Replace pip install with conda install in Binder [#54](https://github.com/jupyter/nbclient/pull/54)
## 0.3.0
### Major Changes
- The `(async_)start_new_kernel_client` method now supports starting a new client when its kernel manager (`self.km`) is a `MultiKernelManager`. The method now returns the kernel id in addition to the kernel client. If the kernel manager was a `KernelManager`, the returned kernel id is `None`. [#51](https://github.com/jupyter/nbclient/pull/51)
- Added sphinx-book-theme for documentation. Added a CircleCI job to let us preview the built documentation in a PR. [#50](https://github.com/jupyter/nbclient/pull/50)
- Added `reset_kc` option to `reset_execution_trackers`, so that the kernel client can be reset and a new one created in calls to `(async_)execute` [#44](https://github.com/jupyter/nbclient/pull/44)
### Docs
- Fixed documentation [#46](https://github.com/jupyter/nbclient/pull/46) [#47](https://github.com/jupyter/nbclient/pull/47)
- Added documentation status badge to the README
- Removed conda from documentation build
## 0.2.0
### Major Changes
- Async support is now available on the client. Methods that support async have an `async_` prefix and can be awaited [#10](https://github.com/jupyter/nbclient/pull/10) [#35](https://github.com/jupyter/nbclient/pull/35) [#37](https://github.com/jupyter/nbclient/pull/37) [#38](https://github.com/jupyter/nbclient/pull/38)
- Dropped support for Python 3.5 due to async compatability issues [#34](https://github.com/jupyter/nbclient/pull/34)
- Notebook documents now include the [new kernel timing fields](https://github.com/jupyter/nbformat/pull/144) [#32](https://github.com/jupyter/nbclient/pull/32)
### Fixes
- Memory and process leaks from nbclient should now be fixed [#34](https://github.com/jupyter/nbclient/pull/34)
- Notebook execution exceptions now include error information in addition to the message [#41](https://github.com/jupyter/nbclient/pull/41)
### Docs
- Added [binder examples](https://mybinder.org/v2/gh/jupyter/nbclient/master?filepath=binder%2Frun_nbclient.ipynb) / tests [#7](https://github.com/jupyter/nbclient/pull/7)
- Added changelog to docs [#22](https://github.com/jupyter/nbclient/pull/22)
- Doc typo fixes [#27](https://github.com/jupyter/nbclient/pull/27) [#30](https://github.com/jupyter/nbclient/pull/30)
## 0.1.0
- Initial release -- moved out of nbconvert 6.0.0-a0
nbclient-0.5.6/docs/client.rst 0000664 0000000 0000000 00000021754 14143421273 0016272 0 ustar 00root root 0000000 0000000 Executing notebooks
===================
.. module:: nbclient.client.guide
Jupyter notebooks are often saved with output cells that have been cleared.
NBClient provides a convenient way to execute the input cells of an
.ipynb notebook file and save the results, both input and output cells,
as a .ipynb file.
In this section we show how to execute a ``.ipynb`` notebook
document saving the result in notebook format. If you need to export
notebooks to other formats, such as reStructured Text or Markdown (optionally
executing them) see `nbconvert `_.
Executing notebooks can be very helpful, for example, to run all notebooks
in Python library in one step, or as a way to automate the data analysis in
projects involving more than one notebook.
Using the Python API interface
------------------------------
This section will illustrate the Python API interface.
Example
~~~~~~~
Let's start with a complete quick example, leaving detailed explanations
to the following sections.
**Import**: First we import nbformat and the :class:`NotebookClient`
class::
import nbformat
from nbclient import NotebookClient
**Load**: Assuming that ``notebook_filename`` contains the path to a notebook,
we can load it with::
nb = nbformat.read(notebook_filename, as_version=4)
**Configure**: Next, we configure the notebook execution mode::
client = NotebookClient(nb, timeout=600, kernel_name='python3', resources={'metadata': {'path': 'notebooks/'}})
We specified two (optional) arguments ``timeout`` and ``kernel_name``, which
define respectively the cell execution timeout and the execution kernel.
Usually you don't need to set these options, but these and other options are
available to control execution context. Note that ``path`` specifies
in which folder to execute the notebook.
**Execute/Run**: To actually run the notebook we call the method
``execute``::
client.execute()
Hopefully, we will not get any errors during the notebook execution
(see the last section for error handling). This notebook will
now have its cell outputs populated with the result of running
each cell.
**Save**: Finally, save the resulting notebook with::
nbformat.write(nb, 'executed_notebook.ipynb')
That's all. Your executed notebook will be saved in the current folder
in the file ``executed_notebook.ipynb``.
Execution arguments (traitlets)
-------------------------------
The arguments passed to :class:`NotebookClient` are configuration options
called `traitlets `_.
There are many cool things about traitlets. For example,
they enforce the input type, and they can be accessed/modified as
class attributes.
Let's now discuss in more detail the two traitlets we used.
The ``timeout`` traitlet defines the maximum time (in seconds) each notebook
cell is allowed to run, if the execution takes longer an exception will be
raised. The default is 30 s, so in cases of long-running cells you may want to
specify an higher value. The ``timeout`` option can also be set to ``None``
or ``-1`` to remove any restriction on execution time.
The second traitlet, ``kernel_name``, allows specifying the name of the kernel
to be used for the execution. By default, the kernel name is obtained from the
notebook metadata. The traitlet ``kernel_name`` allows specifying a
user-defined kernel, overriding the value in the notebook metadata. A common
use case is that of a Python 2/3 library which includes documentation/testing
notebooks. These notebooks will specify either a python2 or python3 kernel in
their metadata (depending on the kernel used the last time the notebook was
saved). In reality, these notebooks will work on both Python 2 and Python 3,
and, for testing, it is important to be able to execute them programmatically
on both versions. Here the traitlet ``kernel_name`` helps simplify and
maintain consistency: we can just run a notebook twice, specifying first
"python2" and then "python3" as the kernel name.
Handling errors and exceptions
------------------------------
In the previous sections we saw how to save an executed notebook, assuming
there are no execution errors. But, what if there are errors?
Execution until first error
~~~~~~~~~~~~~~~~~~~~~~~~~~~
An error during the notebook execution, by default, will stop the execution
and raise a ``CellExecutionError``. Conveniently, the source cell causing
the error and the original error name and message are also printed.
After an error, we can still save the notebook as before::
nbformat.write(nb, 'executed_notebook.ipynb')
The saved notebook contains the output up until the failing cell,
and includes a full stack-trace and error (which can help debugging).
Handling errors
~~~~~~~~~~~~~~~
A useful pattern to execute notebooks while handling errors is the following::
from nbclient.exceptions import CellExecutionError
try:
client.execute()
except CellExecutionError:
msg = 'Error executing the notebook "%s".\n\n' % notebook_filename
msg += 'See notebook "%s" for the traceback.' % notebook_filename_out
print(msg)
raise
finally:
nbformat.write(nb, notebook_filename_out)
This will save the executed notebook regardless of execution errors.
In case of errors, however, an additional message is printed and the
``CellExecutionError`` is raised. The message directs the user to
the saved notebook for further inspection.
Execute and save all errors
~~~~~~~~~~~~~~~~~~~~~~~~~~~
As a last scenario, it is sometimes useful to execute notebooks which raise
exceptions, for example to show an error condition. In this case, instead of
stopping the execution on the first error, we can keep executing the notebook
using the traitlet ``allow_errors`` (default is False). With
``allow_errors=True``, the notebook is executed until the end, regardless of
any error encountered during the execution. The output notebook will contain
the stack-traces and error messages for **all** the cells raising exceptions.
Widget state
------------
If your notebook contains any
`Jupyter Widgets <https://ipywidgets.readthedocs.io/en/latest/>`_,
the state of all the widgets can be stored in the notebook's metadata.
This allows rendering of the live widgets on, for instance, nbviewer, or when
converting to html.
We can tell nbclient to not store the state using the `store_widget_state`
argument::
client = NotebookClient(nb, store_widget_state=False)
This widget rendering is not performed against a browser during execution, so
only widget default states or states manipulated via user code will be
calculated during execution. ``%%javascript`` cells will execute upon notebook
rendering, enabling complex interactions to function as expected when viewed by
a UI.
If you can't view widget results after execution, you may need to select
:menuselection:`Trust Notebook` under the :menuselection:`File` menu.
Using a command-line interface
------------------------------
This section will illustrate how to run notebooks from your terminal. It supports the most basic use case. For more sophisticated execution options, consider the `papermill <https://papermill.readthedocs.io/en/latest/>`_ library.
This library's command line tool is available by running `jupyter run`. It expects notebooks as input arguments and accepts optional flags to modify the default behavior.
Running a notebook is this easy.::
jupyter run notebook.ipynb
You can pass more than one notebook as well.::
jupyter run notebook.ipynb notebook2.ipynb
By default, notebook errors will be raised and printed into the terminal. You can suppress them by passing the ``--allow-errors`` flag.::
jupyter run notebook.ipynb --allow-errors
Other options allow you to modify the timeout length and dictate the kernel in use. A full set of options is available via the help command.::
jupyter run --help
An application used to execute notebook files (*.ipynb)
Options
=======
The options below are convenience aliases to configurable class-options,
as listed in the "Equivalent to" description-line of the aliases.
To see all configurable class-options for some , use:
--help-all
--allow-errors
Errors are ignored and execution is continued until the end of the notebook.
Equivalent to: [--NbClientApp.allow_errors=True]
--timeout=
The time to wait (in seconds) for output from executions. If a cell
execution takes longer, a TimeoutError is raised. ``-1`` will disable the
timeout.
Default: None
Equivalent to: [--NbClientApp.timeout]
--startup_timeout=
The time to wait (in seconds) for the kernel to start. If kernel startup
takes longer, a RuntimeError is raised.
Default: 60
Equivalent to: [--NbClientApp.startup_timeout]
--kernel_name=
Name of kernel to use to execute the cells. If not set, use the kernel_spec
embedded in the notebook.
Default: ''
Equivalent to: [--NbClientApp.kernel_name]
To see all available configurables, use `--help-all`.
nbclient-0.5.6/docs/conf.py 0000664 0000000 0000000 00000012506 14143421273 0015554 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python3
#
# nbclient documentation build configuration file, created by
# sphinx-quickstart on Mon Jan 26 16:00:00 2020.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys

# The source tree must be on sys.path *before* importing nbclient; otherwise a
# source checkout that has not been pip-installed cannot be documented.
sys.path.insert(0, os.path.abspath('..'))

import nbclient  # noqa: E402
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.mathjax',
'sphinx.ext.napoleon',
'myst_parser',
]
autodoc_mock_imports = ['pytest', 'nbconvert', 'testpath']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
source_suffix = ['.rst', '.md']
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'nbclient'
copyright = '2020, Project Jupyter'
author = 'Project Jupyter'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '.'.join(nbclient.__version__.split('.')[0:2])
# The full version, including alpha/beta/rc tags.
release = nbclient.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'UPDATE.md']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
default_role = 'any'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_book_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
"path_to_docs": "docs",
"repository_url": "https://github.com/jupyter/nbclient",
"repository_branch": "master",
"use_edit_page_button": True,
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
# html_sidebars = {}
html_title = "nbclient"
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'nclientdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [(master_doc, 'nbclient.tex', 'nbclient Documentation', 'jupyter team', 'manual')]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, 'nbclient', 'nbclient Documentation', [author], 1)]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
'nbclient',
'nbclient Documentation',
author,
'nbclient',
'One line description of project.',
'Miscellaneous',
)
]
# Example configuration for intersphinx: refer to the Python standard library.
# Use the named-target form {name: (target_url, inventory_or_None)} — the bare
# "URL key" form is deprecated and rejected by Sphinx >= 6.
intersphinx_mapping = {'python': ('https://docs.python.org/3/', None)}
nbclient-0.5.6/docs/index.rst 0000664 0000000 0000000 00000004025 14143421273 0016113 0 ustar 00root root 0000000 0000000 Welcome to nbclient
===================
.. image:: https://img.shields.io/github/stars/jupyter/nbclient?label=stars&style=social
:alt: GitHub stars
:target: https://github.com/jupyter/nbclient
.. image:: https://github.com/jupyter/nbclient/workflows/CI/badge.svg
:alt: GitHub Actions
:target: https://github.com/jupyter/nbclient/actions
.. image:: https://codecov.io/github/jupyter/nbclient/coverage.svg?branch=master
:alt: CodeCov
:target: https://codecov.io/github/jupyter/nbclient
---
**NBClient** lets you **execute** notebooks.
A client library for programmatic notebook execution, **NBClient** is a tool for running Jupyter Notebooks in
different execution contexts, including the command line. NBClient was spun out of `nbconvert <https://nbconvert.readthedocs.io/en/latest/>`_'s
former ``ExecutePreprocessor``.
Demo
----
To demo **NBClient** interactively, click the Binder link below:
.. image:: https://mybinder.org/badge_logo.svg
:target: https://mybinder.org/v2/gh/jupyter/nbclient/master?filepath=binder%2Frun_nbclient.ipynb
Origins
-------
This library used to be part of `nbconvert <https://nbconvert.readthedocs.io/en/latest/>`_ and was extracted into its own library for easier updating and importing by downstream libraries and applications.
Python Version Support
----------------------
This library currently supports python 3.6+ versions. As minor python
versions are officially sunset by the python org, nbclient will similarly
drop support in the future.
Documentation
-------------
These pages guide you through the installation and usage of nbclient.
.. toctree::
:maxdepth: 1
:caption: Documentation
installation
client
changelog
API Reference
-------------
If you are looking for information about a specific function, class, or method,
this documentation section will help you.
.. toctree::
:maxdepth: 3
:caption: Table of Contents
reference/index.rst
reference/nbclient.tests.rst
Indices and tables
------------------
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
nbclient-0.5.6/docs/installation.rst 0000664 0000000 0000000 00000000447 14143421273 0017511 0 ustar 00root root 0000000 0000000 Installation
============
Installing nbclient
-------------------
From the command line:
.. code-block:: bash
python3 -m pip install nbclient
.. seealso::
`Installing Jupyter `__
NBClient is part of the Jupyter ecosystem.
nbclient-0.5.6/docs/make.bat 0000664 0000000 0000000 00000001410 14143421273 0015652 0 ustar 00root root 0000000 0000000 @ECHO OFF
pushd %~dp0

REM Command file for Sphinx documentation

REM Allow the caller to override the sphinx-build executable via SPHINXBUILD.
if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build
set SPHINXPROJ=nbclient

REM With no target argument, fall through to the help target.
if "%1" == "" goto help

REM Probe that sphinx-build exists; errorlevel 9009 means "command not found".
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.http://sphinx-doc.org/
	exit /b 1
)

REM Forward the requested target (e.g. "html") to sphinx-build's make mode.
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%

:end
popd
nbclient-0.5.6/docs/reference/ 0000775 0000000 0000000 00000000000 14143421273 0016207 5 ustar 00root root 0000000 0000000 nbclient-0.5.6/docs/reference/index.rst 0000664 0000000 0000000 00000000254 14143421273 0020051 0 ustar 00root root 0000000 0000000 Reference
=========
This part of the documentation lists the full API reference of all public classes and functions.
.. toctree::
:maxdepth: 2
nbclient
modules
nbclient-0.5.6/docs/reference/modules.rst 0000664 0000000 0000000 00000000075 14143421273 0020413 0 ustar 00root root 0000000 0000000 nbclient
========
.. toctree::
:maxdepth: 4
nbclient
nbclient-0.5.6/docs/reference/nbclient.rst 0000664 0000000 0000000 00000000775 14143421273 0020550 0 ustar 00root root 0000000 0000000 nbclient package
================
Subpackages
-----------
.. toctree::
nbclient.tests
Submodules
----------
nbclient.client module
----------------------
.. automodule:: nbclient.client
:members:
:undoc-members:
:show-inheritance:
nbclient.exceptions module
--------------------------
.. automodule:: nbclient.exceptions
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: nbclient
:members:
:undoc-members:
:show-inheritance:
nbclient-0.5.6/docs/reference/nbclient.tests.rst 0000664 0000000 0000000 00000001266 14143421273 0021705 0 ustar 00root root 0000000 0000000 nbclient.tests package
======================
Submodules
----------
nbclient.tests.base module
--------------------------
.. automodule:: nbclient.tests.base
:members:
:undoc-members:
:show-inheritance:
nbclient.tests.fake\_kernelmanager module
-----------------------------------------
.. automodule:: nbclient.tests.fake_kernelmanager
:members:
:undoc-members:
:show-inheritance:
nbclient.tests.test\_client module
----------------------------------
.. automodule:: nbclient.tests.test_client
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: nbclient.tests
:members:
:undoc-members:
:show-inheritance:
nbclient-0.5.6/docs/requirements-doc.txt 0000664 0000000 0000000 00000000064 14143421273 0020300 0 ustar 00root root 0000000 0000000 Sphinx>=1.7
sphinx-book-theme
mock
moto
myst-parser
nbclient-0.5.6/nbclient/ 0000775 0000000 0000000 00000000000 14143421273 0015117 5 ustar 00root root 0000000 0000000 nbclient-0.5.6/nbclient/__init__.py 0000664 0000000 0000000 00000000741 14143421273 0017232 0 ustar 00root root 0000000 0000000 import subprocess
import sys
from ._version import version as __version__ # noqa: F401
from .client import NotebookClient, execute # noqa: F401
def _cleanup() -> None:
    """No-op stand-in for ``subprocess._cleanup`` (see the patch below)."""
    pass


# patch subprocess on Windows for python<3.7
# see https://bugs.python.org/issue37380
# the fix for python3.7: https://github.com/python/cpython/pull/15706/files
if sys.platform == 'win32':
    if sys.version_info < (3, 7):
        subprocess._cleanup = _cleanup
        subprocess._active = None
nbclient-0.5.6/nbclient/_version.py 0000664 0000000 0000000 00000000022 14143421273 0017307 0 ustar 00root root 0000000 0000000 version = '0.5.6'
nbclient-0.5.6/nbclient/cli.py 0000664 0000000 0000000 00000011363 14143421273 0016244 0 ustar 00root root 0000000 0000000 import logging
import pathlib
import sys
from textwrap import dedent
import nbformat
from jupyter_core.application import JupyterApp
from traitlets import Bool, Integer, List, Unicode, default
from traitlets.config import catch_config_error
from nbclient import __version__
from .client import NotebookClient
# Map short CLI aliases (e.g. ``--timeout=30``) to the configurable trait
# they set on NbClientApp.
nbclient_aliases = {
    'timeout': 'NbClientApp.timeout',
    'startup_timeout': 'NbClientApp.startup_timeout',
    'kernel_name': 'NbClientApp.kernel_name',
}

# Boolean CLI flags: each entry maps a flag name to (config overrides, help text).
nbclient_flags = {
    'allow-errors': (
        {
            'NbClientApp': {
                'allow_errors': True,
            },
        },
        "Errors are ignored and execution is continued until the end of the notebook.",
    ),
}
class NbClientApp(JupyterApp):
    """
    An application used to execute notebook files (``*.ipynb``)
    """

    version = __version__
    name = 'jupyter-run'
    aliases = nbclient_aliases
    flags = nbclient_flags

    description = Unicode("An application used to execute notebook files (*.ipynb)")
    notebooks = List([], help="Path of notebooks to convert").tag(config=True)
    timeout: int = Integer(
        None,
        allow_none=True,
        help=dedent(
            """
            The time to wait (in seconds) for output from executions.
            If a cell execution takes longer, a TimeoutError is raised.
            ``-1`` will disable the timeout.
            """
        ),
    ).tag(config=True)
    startup_timeout: int = Integer(
        60,
        help=dedent(
            """
            The time to wait (in seconds) for the kernel to start.
            If kernel startup takes longer, a RuntimeError is
            raised.
            """
        ),
    ).tag(config=True)
    allow_errors: bool = Bool(
        False,
        help=dedent(
            """
            When a cell raises an error the default behavior is that
            execution is stopped and a `CellExecutionError`
            is raised.
            If this flag is provided, errors are ignored and execution
            is continued until the end of the notebook.
            """
        ),
    ).tag(config=True)
    skip_cells_with_tag: str = Unicode(
        'skip-execution',
        help=dedent(
            """
            Name of the cell tag to use to denote a cell that should be skipped.
            """
        ),
    ).tag(config=True)
    kernel_name: str = Unicode(
        '',
        help=dedent(
            """
            Name of kernel to use to execute the cells.
            If not set, use the kernel_spec embedded in the notebook.
            """
        ),
    ).tag(config=True)

    @default('log_level')
    def _log_level_default(self):
        # Be chatty by default so users see per-notebook progress messages.
        return logging.INFO

    @catch_config_error
    def initialize(self, argv=None):
        """Parse the command line and execute each requested notebook in order."""
        super().initialize(argv)

        # Get notebooks to run
        self.notebooks = self.get_notebooks()
        # If there are none, throw an error
        if not self.notebooks:
            # Report the usage error on stderr, not stdout.
            print(f"{self.name}: error: expected path to notebook", file=sys.stderr)
            sys.exit(-1)
        # Loop and run them one by one
        for path in self.notebooks:
            self.run_notebook(path)

    def get_notebooks(self):
        """Return the list of notebook paths to execute.

        Positional command-line arguments take precedence over the
        ``notebooks`` trait.
        """
        if self.extra_args:
            return self.extra_args
        return self.notebooks

    def run_notebook(self, notebook_path):
        """Execute a single notebook file.

        Parameters
        ----------
        notebook_path : str
            Path to the ``.ipynb`` file to execute.
        """
        # Log it
        self.log.info(f"Executing {notebook_path}")

        # Strip only a trailing ".ipynb" suffix; ``str.replace`` would also
        # mangle any ".ipynb" occurring elsewhere in the path.
        suffix = ".ipynb"
        name = notebook_path[: -len(suffix)] if notebook_path.endswith(suffix) else notebook_path

        # The notebook's parent directory becomes the kernel's working directory.
        path = pathlib.Path(notebook_path).parent.absolute()

        # Set the input file path
        input_path = f"{name}.ipynb"

        # Open up the notebook we're going to run
        with open(input_path) as f:
            nb = nbformat.read(f, as_version=4)

        # Configure nbclient to run the notebook
        client = NotebookClient(
            nb,
            timeout=self.timeout,
            startup_timeout=self.startup_timeout,
            skip_cells_with_tag=self.skip_cells_with_tag,
            allow_errors=self.allow_errors,
            kernel_name=self.kernel_name,
            resources={'metadata': {'path': path}},
        )

        # Run it
        client.execute()
class NbClientAlias(NbClientApp):
    """
    An alias to the run command.
    """

    name = 'jupyter-execute'

    @catch_config_error
    def initialize(self, argv=None):
        # Nudge users toward the canonical entry point before delegating.
        notice = (
            "This alias to `jupyter run` may be deprecated in the future. "
            "Please switch to using `run`."
        )
        print(notice)
        super().initialize(argv)
# Console-script entry points: ``jupyter run`` and its deprecated alias
# ``jupyter execute``.
run = NbClientApp.launch_instance
execute = NbClientAlias.launch_instance
nbclient-0.5.6/nbclient/client.py 0000664 0000000 0000000 00000120407 14143421273 0016753 0 ustar 00root root 0000000 0000000 import atexit
import base64
import collections
import datetime
import signal
from textwrap import dedent
try:
from contextlib import asynccontextmanager
except ImportError:
# Use the backport package async-generator for Python < 3.7.
# This should be removed when nbclient drops support for Python 3.6
from async_generator import asynccontextmanager # type: ignore
import asyncio
import typing as t
from contextlib import contextmanager
from queue import Empty
from time import monotonic
from jupyter_client import KernelManager
from jupyter_client.client import KernelClient
from nbformat import NotebookNode
from nbformat.v4 import output_from_msg
from traitlets import Any, Bool, Dict, Enum, Integer, List, Type, Unicode, default
from traitlets.config.configurable import LoggingConfigurable
from .exceptions import (
CellControlSignal,
CellExecutionComplete,
CellExecutionError,
CellTimeoutError,
DeadKernelError,
)
from .output_widget import OutputWidget
from .util import ensure_async, run_sync
def timestamp() -> str:
    """Return the current UTC time as an ISO-8601 string with a ``Z`` suffix."""
    now = datetime.datetime.utcnow()
    return f"{now.isoformat()}Z"
class NotebookClient(LoggingConfigurable):
"""
Encompasses a Client for executing cells in a notebook
"""
timeout: int = Integer(
None,
allow_none=True,
help=dedent(
"""
The time to wait (in seconds) for output from executions.
If a cell execution takes longer, a TimeoutError is raised.
``None`` or ``-1`` will disable the timeout. If ``timeout_func`` is set,
it overrides ``timeout``.
"""
),
).tag(config=True)
timeout_func: t.Any = Any(
default_value=None,
allow_none=True,
help=dedent(
"""
A callable which, when given the cell source as input,
returns the time to wait (in seconds) for output from cell
executions. If a cell execution takes longer, a TimeoutError
is raised.
Returning ``None`` or ``-1`` will disable the timeout for the cell.
Not setting ``timeout_func`` will cause the client to
default to using the ``timeout`` trait for all cells. The
``timeout_func`` trait overrides ``timeout`` if it is not ``None``.
"""
),
).tag(config=True)
interrupt_on_timeout: bool = Bool(
False,
help=dedent(
"""
If execution of a cell times out, interrupt the kernel and
continue executing other cells rather than throwing an error and
stopping.
"""
),
).tag(config=True)
startup_timeout: int = Integer(
60,
help=dedent(
"""
The time to wait (in seconds) for the kernel to start.
If kernel startup takes longer, a RuntimeError is
raised.
"""
),
).tag(config=True)
allow_errors: bool = Bool(
False,
help=dedent(
"""
If ``False`` (default), when a cell raises an error the
execution is stopped and a `CellExecutionError`
is raised, except if the error name is in
``allow_error_names``.
If ``True``, execution errors are ignored and the execution
is continued until the end of the notebook. Output from
exceptions is included in the cell output in both cases.
"""
),
).tag(config=True)
allow_error_names: t.List[str] = List(
Unicode(),
help=dedent(
"""
List of error names which won't stop the execution. Use this if the
``allow_errors`` option it too general and you want to allow only
specific kinds of errors.
"""
),
).tag(config=True)
force_raise_errors: bool = Bool(
False,
help=dedent(
"""
If False (default), errors from executing the notebook can be
allowed with a ``raises-exception`` tag on a single cell, or the
``allow_errors`` or ``allow_error_names`` configurable options for
all cells. An allowed error will be recorded in notebook output, and
execution will continue. If an error occurs when it is not
explicitly allowed, a `CellExecutionError` will be raised.
If True, `CellExecutionError` will be raised for any error that occurs
while executing the notebook. This overrides the ``allow_errors``
and ``allow_error_names`` options and the ``raises-exception`` cell
tag.
"""
),
).tag(config=True)
skip_cells_with_tag: str = Unicode(
'skip-execution',
help=dedent(
"""
Name of the cell tag to use to denote a cell that should be skipped.
"""
),
).tag(config=True)
extra_arguments: t.List = List(Unicode()).tag(config=True)
kernel_name: str = Unicode(
'',
help=dedent(
"""
Name of kernel to use to execute the cells.
If not set, use the kernel_spec embedded in the notebook.
"""
),
).tag(config=True)
raise_on_iopub_timeout: bool = Bool(
False,
help=dedent(
"""
If ``False`` (default), then the kernel will continue waiting for
iopub messages until it receives a kernel idle message, or until a
timeout occurs, at which point the currently executing cell will be
skipped. If ``True``, then an error will be raised after the first
timeout. This option generally does not need to be used, but may be
useful in contexts where there is the possibility of executing
notebooks with memory-consuming infinite loops.
"""
),
).tag(config=True)
store_widget_state: bool = Bool(
True,
help=dedent(
"""
If ``True`` (default), then the state of the Jupyter widgets created
at the kernel will be stored in the metadata of the notebook.
"""
),
).tag(config=True)
record_timing: bool = Bool(
True,
help=dedent(
"""
If ``True`` (default), then the execution timings of each cell will
be stored in the metadata of the notebook.
"""
),
).tag(config=True)
iopub_timeout: int = Integer(
4,
allow_none=False,
help=dedent(
"""
The time to wait (in seconds) for IOPub output. This generally
doesn't need to be set, but on some slow networks (such as CI
systems) the default timeout might not be long enough to get all
messages.
"""
),
).tag(config=True)
shell_timeout_interval: int = Integer(
5,
allow_none=False,
help=dedent(
"""
The time to wait (in seconds) for Shell output before retrying.
This generally doesn't need to be set, but if one needs to check
for dead kernels at a faster rate this can help.
"""
),
).tag(config=True)
shutdown_kernel = Enum(
['graceful', 'immediate'],
default_value='graceful',
help=dedent(
"""
If ``graceful`` (default), then the kernel is given time to clean
up after executing all cells, e.g., to execute its ``atexit`` hooks.
If ``immediate``, then the kernel is signaled to immediately
terminate.
"""
),
).tag(config=True)
ipython_hist_file: str = Unicode(
default_value=':memory:',
help="""Path to file to use for SQLite history database for an IPython kernel.
The specific value ``:memory:`` (including the colon
at both end but not the back ticks), avoids creating a history file. Otherwise, IPython
will create a history file for each kernel.
When running kernels simultaneously (e.g. via multiprocessing) saving history a single
SQLite file can result in database errors, so using ``:memory:`` is recommended in
non-interactive contexts.
""",
).tag(config=True)
kernel_manager_class: KernelManager = Type(config=True, help='The kernel manager class to use.')
    @default('kernel_manager_class')
    def _kernel_manager_class_default(self) -> KernelManager:
        """Use a dynamic default to avoid importing jupyter_client at startup"""
        # Imported lazily so merely importing nbclient stays cheap.
        from jupyter_client import AsyncKernelManager

        return AsyncKernelManager
_display_id_map: t.Dict[str, t.Dict] = Dict(
help=dedent(
"""
mapping of locations of outputs with a given display_id
tracks cell index and output index within cell.outputs for
each appearance of the display_id
{
'display_id': {
cell_idx: [output_idx,]
}
}
"""
)
)
display_data_priority: t.List = List(
[
'text/html',
'application/pdf',
'text/latex',
'image/svg+xml',
'image/png',
'image/jpeg',
'text/markdown',
'text/plain',
],
help="""
An ordered list of preferred output type, the first
encountered will usually be used when converting discarding
the others.
""",
).tag(config=True)
resources: t.Dict = Dict(
help=dedent(
"""
Additional resources used in the conversion process. For example,
passing ``{'metadata': {'path': run_path}}`` sets the
execution path to ``run_path``.
"""
)
)
    def __init__(self, nb: NotebookNode, km: t.Optional[KernelManager] = None, **kw) -> None:
        """Initializes the execution manager.

        Parameters
        ----------
        nb : NotebookNode
            Notebook being executed.
        km : KernelManager (optional)
            Optional kernel manager. If none is provided, a kernel manager will
            be created.
        """
        super().__init__(**kw)
        self.nb: NotebookNode = nb
        self.km: t.Optional[KernelManager] = km
        self.owns_km: bool = km is None  # whether the NotebookClient owns the kernel manager
        # Kernel client; populated once a kernel client is started.
        self.kc: t.Optional[KernelClient] = None
        self.reset_execution_trackers()
        # Widget model classes we can mimic locally, keyed by module then name.
        self.widget_registry: t.Dict[str, t.Dict] = {
            '@jupyter-widgets/output': {'OutputModel': OutputWidget}
        }
        # comm_open_handlers should return an object with a .handle_msg(msg) method or None
        self.comm_open_handlers: t.Dict[str, t.Any] = {
            'jupyter.widget': self.on_comm_open_jupyter_widget
        }
    def reset_execution_trackers(self) -> None:
        """Resets any per-execution trackers."""
        # Pending task that polls for the execute_reply of the current cell.
        self.task_poll_for_reply: t.Optional[asyncio.Future] = None
        self.code_cells_executed = 0
        self._display_id_map = {}
        # Widget state and binary buffers harvested from comm messages.
        self.widget_state: t.Dict[str, t.Dict] = {}
        self.widget_buffers: t.Dict[str, t.Dict[t.Tuple[str, ...], t.Dict[str, str]]] = {}
        # maps to list of hooks, where the last is used, this is used
        # to support nested use of output widgets.
        self.output_hook_stack: t.Any = collections.defaultdict(list)
        # our front-end mimicing Output widgets
        self.comm_objects: t.Dict[str, t.Any] = {}
def create_kernel_manager(self) -> KernelManager:
"""Creates a new kernel manager.
Returns
-------
km : KernelManager
Kernel manager whose client class is asynchronous.
"""
if not self.kernel_name:
kn = self.nb.metadata.get('kernelspec', {}).get('name')
if kn is not None:
self.kernel_name = kn
if not self.kernel_name:
self.km = self.kernel_manager_class(config=self.config)
else:
self.km = self.kernel_manager_class(kernel_name=self.kernel_name, config=self.config)
# If the current kernel manager is still using the default (synchronous) KernelClient class,
# switch to the async version since that's what NBClient prefers.
if self.km.client_class == 'jupyter_client.client.KernelClient':
self.km.client_class = 'jupyter_client.asynchronous.AsyncKernelClient'
return self.km
    async def _async_cleanup_kernel(self) -> None:
        """Shut down the kernel and release client/manager resources.

        Honors ``shutdown_kernel``: ``immediate`` signals the kernel to
        terminate right away instead of shutting down gracefully.
        """
        assert self.km is not None
        now = self.shutdown_kernel == "immediate"
        try:
            # Queue the manager to kill the process, and recover gracefully if it's already dead.
            if await ensure_async(self.km.is_alive()):
                await ensure_async(self.km.shutdown_kernel(now=now))
        except RuntimeError as e:
            # The error isn't specialized, so we have to check the message
            if 'No kernel is running!' not in str(e):
                raise
        finally:
            # Remove any state left over even if we failed to stop the kernel
            await ensure_async(self.km.cleanup_resources())
            if getattr(self, "kc") and self.kc is not None:
                await ensure_async(self.kc.stop_channels())
            self.kc = None
            self.km = None

    # Synchronous wrapper around _async_cleanup_kernel.
    _cleanup_kernel = run_sync(_async_cleanup_kernel)
async def async_start_new_kernel(self, **kwargs) -> None:
    """Start a fresh kernel via the kernel manager.

    Parameters
    ----------
    kwargs :
        Any options for ``self.kernel_manager_class.start_kernel()``. Because
        that defaults to AsyncKernelManager, this will likely include options
        accepted by ``AsyncKernelManager.start_kernel()``, which includes ``cwd``.
    """
    assert self.km is not None

    # Default the kernel's working directory to the notebook's resource path.
    resource_path = self.resources.get('metadata', {}).get('path') or None
    if resource_path and 'cwd' not in kwargs:
        kwargs["cwd"] = resource_path

    hist_file_configured = any(
        arg.startswith('--HistoryManager.hist_file') for arg in self.extra_arguments
    )
    # For ipykernel kernels, point the history database at the configured
    # file unless the caller already supplied one.
    if (
        getattr(self.km, 'ipykernel', False)
        and self.ipython_hist_file
        and not hist_file_configured
    ):
        self.extra_arguments += [f'--HistoryManager.hist_file={self.ipython_hist_file}']

    await ensure_async(self.km.start_kernel(extra_arguments=self.extra_arguments, **kwargs))

start_new_kernel = run_sync(async_start_new_kernel)
async def async_start_new_kernel_client(self) -> KernelClient:
    """Create, start and wait on a kernel client for ``self.km``.

    Returns
    -------
    kc : KernelClient
        Kernel client as created by the kernel manager ``km``.
    """
    assert self.km is not None
    kc = self.km.client()
    self.kc = kc
    await ensure_async(kc.start_channels())
    try:
        # Block until the kernel answers, or give up after startup_timeout.
        await ensure_async(kc.wait_for_ready(timeout=self.startup_timeout))
    except RuntimeError:
        # A kernel that never came up must still be torn down.
        await self._async_cleanup_kernel()
        raise
    kc.allow_stdin = False
    return kc

start_new_kernel_client = run_sync(async_start_new_kernel_client)
@contextmanager
def setup_kernel(self, **kwargs) -> t.Generator:
    """
    Context manager for setting up the kernel to execute a notebook.

    This assigns the Kernel Manager (``self.km``) if missing and Kernel Client (``self.kc``).

    When control returns from the yield it stops the client's zmq channels, and shuts
    down the kernel.
    """
    # by default, cleanup the kernel client if we own the kernel manager
    # and keep it alive if we don't
    cleanup_kc = kwargs.pop('cleanup_kc', self.owns_km)

    # Can't use run_until_complete on an asynccontextmanager function :(
    if self.km is None:
        self.km = self.create_kernel_manager()

    if not self.km.has_kernel:
        self.start_new_kernel(**kwargs)
        self.start_new_kernel_client()
    try:
        yield
    finally:
        # Tear down the client/kernel only when we own the kernel manager
        # (or the caller explicitly asked for cleanup via cleanup_kc).
        if cleanup_kc:
            self._cleanup_kernel()
@asynccontextmanager
async def async_setup_kernel(self, **kwargs) -> t.AsyncGenerator:
    """
    Context manager for setting up the kernel to execute a notebook.

    This assigns the Kernel Manager (``self.km``) if missing and Kernel Client (``self.kc``).

    When control returns from the yield it stops the client's zmq channels, and shuts
    down the kernel.

    Handlers for SIGINT and SIGTERM are also added to cleanup in case of unexpected shutdown.
    """
    # by default, cleanup the kernel client if we own the kernel manager
    # and keep it alive if we don't
    cleanup_kc = kwargs.pop('cleanup_kc', self.owns_km)

    if self.km is None:
        self.km = self.create_kernel_manager()

    # self._cleanup_kernel uses run_async, which ensures the ioloop is running again.
    # This is necessary as the ioloop has stopped once atexit fires.
    atexit.register(self._cleanup_kernel)

    def on_signal():
        # Schedule the async cleanup on the running loop; once queued, the
        # atexit fallback is redundant and is unregistered.
        asyncio.ensure_future(self._async_cleanup_kernel())
        atexit.unregister(self._cleanup_kernel)

    loop = asyncio.get_event_loop()
    try:
        loop.add_signal_handler(signal.SIGINT, on_signal)
        loop.add_signal_handler(signal.SIGTERM, on_signal)
    except (NotImplementedError, RuntimeError):
        # NotImplementedError: Windows does not support signals.
        # RuntimeError: Raised when add_signal_handler is called outside the main thread
        pass

    if not self.km.has_kernel:
        await self.async_start_new_kernel(**kwargs)
        await self.async_start_new_kernel_client()
    try:
        yield
    finally:
        if cleanup_kc:
            await self._async_cleanup_kernel()
        # Undo the atexit/signal wiring installed above.
        atexit.unregister(self._cleanup_kernel)
        try:
            loop.remove_signal_handler(signal.SIGINT)
            loop.remove_signal_handler(signal.SIGTERM)
        except (NotImplementedError, RuntimeError):
            pass
async def async_execute(self, reset_kc: bool = False, **kwargs) -> NotebookNode:
    """
    Executes each code cell.

    Parameters
    ----------
    kwargs :
        Any option for ``self.kernel_manager_class.start_kernel()``. Because
        that defaults to AsyncKernelManager, this will likely include options
        accepted by ``jupyter_client.AsyncKernelManager.start_kernel()``,
        which includes ``cwd``.

        ``reset_kc`` if True, the kernel client will be reset and a new one
        will be created (default: False).

    Returns
    -------
    nb : NotebookNode
        The executed notebook.
    """
    if reset_kc and self.owns_km:
        # Drop the current kernel/client so a fresh one is created below.
        await self._async_cleanup_kernel()
    self.reset_execution_trackers()

    async with self.async_setup_kernel(**kwargs):
        assert self.kc is not None
        self.log.info("Executing notebook with kernel: %s" % self.kernel_name)
        # Record the kernel's language_info in notebook metadata before
        # executing any cells.
        msg_id = await ensure_async(self.kc.kernel_info())
        info_msg = await self.async_wait_for_reply(msg_id)
        if info_msg is not None:
            if 'language_info' in info_msg['content']:
                self.nb.metadata['language_info'] = info_msg['content']['language_info']
            else:
                raise RuntimeError(
                    'Kernel info received message content has no "language_info" key. '
                    'Content is:\n' + str(info_msg['content'])
                )
        for index, cell in enumerate(self.nb.cells):
            # Ignore `'execution_count' in content` as it's always 1
            # when store_history is False
            await self.async_execute_cell(
                cell, index, execution_count=self.code_cells_executed + 1
            )
        self.set_widgets_metadata()

    return self.nb

execute = run_sync(async_execute)
def set_widgets_metadata(self) -> None:
    """Record collected ipywidgets state in the notebook metadata.

    Serializes ``self.widget_state`` into the standard
    ``application/vnd.jupyter.widget-state+json`` metadata entry and
    attaches any captured binary buffers to their widget models.
    """
    if not self.widget_state:
        return
    serialized_state = {
        model_id: self._serialize_widget_state(state)
        for model_id, state in self.widget_state.items()
        if '_model_name' in state
    }
    self.nb.metadata.widgets = {
        'application/vnd.jupyter.widget-state+json': {
            'state': serialized_state,
            'version_major': 2,
            'version_minor': 0,
        }
    }
    for model_id, widget in serialized_state.items():
        buffers = self.widget_buffers.get(model_id)
        if buffers:
            widget['buffers'] = list(buffers.values())
def _update_display_id(self, display_id: str, msg: t.Dict) -> None:
    """Update all previously recorded outputs sharing ``display_id`` from ``msg``."""
    cell_map = self._display_id_map.get(display_id)
    if cell_map is None:
        self.log.debug("display id %r not in %s", display_id, self._display_id_map)
        return

    if msg['header']['msg_type'] == 'update_display_data':
        # Normalize the type so output_from_msg treats it as display_data.
        msg['header']['msg_type'] = 'display_data'

    try:
        out = output_from_msg(msg)
    except ValueError:
        self.log.error("unhandled iopub msg: " + msg['msg_type'])
        return

    # Rewrite every recorded (cell, output) slot for this display id.
    for cell_idx, output_indices in cell_map.items():
        outputs = self.nb['cells'][cell_idx]['outputs']
        for output_idx in output_indices:
            target = outputs[output_idx]
            target['data'] = out['data']
            target['metadata'] = out['metadata']
async def _async_poll_for_reply(
    self,
    msg_id: str,
    cell: NotebookNode,
    timeout: t.Optional[int],
    task_poll_output_msg: asyncio.Future,
    task_poll_kernel_alive: asyncio.Future,
) -> t.Dict:
    """Wait for the shell reply to ``msg_id``, enforcing the cell timeout.

    Once the reply arrives, waits up to ``self.iopub_timeout`` for the IOPub
    output task to drain, and cancels the kernel-liveness poller before
    returning the reply message.
    """
    assert self.kc is not None
    new_timeout: t.Optional[float] = None
    if timeout is not None:
        deadline = monotonic() + timeout
        new_timeout = float(timeout)
    while True:
        try:
            msg = await ensure_async(self.kc.shell_channel.get_msg(timeout=new_timeout))
            if msg['parent_header'].get('msg_id') == msg_id:
                if self.record_timing:
                    cell['metadata']['execution']['shell.execute_reply'] = timestamp()
                try:
                    # Give the IOPub poller a bounded window to finish
                    # delivering this cell's output.
                    await asyncio.wait_for(task_poll_output_msg, self.iopub_timeout)
                except (asyncio.TimeoutError, Empty):
                    if self.raise_on_iopub_timeout:
                        task_poll_kernel_alive.cancel()
                        raise CellTimeoutError.error_from_timeout_and_cell(
                            "Timeout waiting for IOPub output", self.iopub_timeout, cell
                        )
                    else:
                        self.log.warning("Timeout waiting for IOPub output")
                task_poll_kernel_alive.cancel()
                return msg
            else:
                # Reply for some other request: shrink the remaining timeout
                # and keep polling.
                if new_timeout is not None:
                    new_timeout = max(0, deadline - monotonic())
        except Empty:
            # received no message, check if kernel is still alive
            assert timeout is not None
            task_poll_kernel_alive.cancel()
            await self._async_check_alive()
            await self._async_handle_timeout(timeout, cell)
async def _async_poll_output_msg(
    self, parent_msg_id: str, cell: NotebookNode, cell_index: int
) -> None:
    """Consume IOPub messages for ``parent_msg_id`` until execution completes.

    Returns when ``process_message`` signals completion by raising
    ``CellExecutionComplete`` (the kernel's idle status message).
    """
    assert self.kc is not None
    while True:
        msg = await ensure_async(self.kc.iopub_channel.get_msg(timeout=None))
        # Ignore messages that belong to other requests.
        if msg['parent_header'].get('msg_id') == parent_msg_id:
            try:
                # Will raise CellExecutionComplete when completed
                self.process_message(msg, cell, cell_index)
            except CellExecutionComplete:
                return
async def _async_poll_kernel_alive(self) -> None:
    """Poll kernel liveness once per second.

    If the kernel is found dead, cancels the pending reply task
    (``self.task_poll_for_reply``) so the executing cell fails fast.
    """
    while True:
        await asyncio.sleep(1)
        try:
            await self._async_check_alive()
        except DeadKernelError:
            assert self.task_poll_for_reply is not None
            self.task_poll_for_reply.cancel()
            return
def _get_timeout(self, cell: t.Optional[NotebookNode]) -> int:
if self.timeout_func is not None and cell is not None:
timeout = self.timeout_func(cell)
else:
timeout = self.timeout
if not timeout or timeout < 0:
timeout = None
return timeout
async def _async_handle_timeout(
    self, timeout: int, cell: t.Optional[NotebookNode] = None
) -> None:
    """React to an execution timeout.

    Either interrupts the kernel (when ``self.interrupt_on_timeout`` is set,
    allowing execution to continue with the next cell) or raises
    ``CellTimeoutError`` to abort the run.
    """
    self.log.error("Timeout waiting for execute reply (%is)." % timeout)
    if self.interrupt_on_timeout:
        self.log.error("Interrupting kernel")
        assert self.km is not None
        await ensure_async(self.km.interrupt_kernel())
    else:
        raise CellTimeoutError.error_from_timeout_and_cell(
            "Cell execution timed out", timeout, cell
        )
async def _async_check_alive(self) -> None:
    """Raise ``DeadKernelError`` if the kernel process is no longer alive."""
    assert self.kc is not None
    if not await ensure_async(self.kc.is_alive()):
        self.log.error("Kernel died while waiting for execute reply.")
        raise DeadKernelError("Kernel died")
async def async_wait_for_reply(
    self, msg_id: str, cell: t.Optional[NotebookNode] = None
) -> t.Optional[t.Dict]:
    """Wait for the shell-channel reply matching ``msg_id``.

    Polls the shell channel in ``self.shell_timeout_interval`` slices so
    kernel liveness can be checked between reads.

    Parameters
    ----------
    msg_id : str
        The parent message id the reply must match.
    cell : NotebookNode, optional
        Cell used to resolve a per-cell timeout, if any.

    Returns
    -------
    msg : dict or None
        The reply message, or None if the timeout elapsed without a reply.
    """
    assert self.kc is not None
    # wait for finish, with timeout
    timeout = self._get_timeout(cell)
    cumulative_time = 0
    while True:
        try:
            msg = await ensure_async(
                self.kc.shell_channel.get_msg(timeout=self.shell_timeout_interval)
            )
        except Empty:
            await self._async_check_alive()
            cumulative_time += self.shell_timeout_interval
            if timeout and cumulative_time > timeout:
                # Bug fix: this previously invoked the nonexistent
                # ``self._async_async_handle_timeout``, raising
                # AttributeError instead of handling the timeout.
                await self._async_handle_timeout(timeout, cell)
                break
        else:
            if msg['parent_header'].get('msg_id') == msg_id:
                return msg
    return None

wait_for_reply = run_sync(async_wait_for_reply)
# Backwards compatability naming for papermill
_wait_for_reply = wait_for_reply
def _passed_deadline(self, deadline: int) -> bool:
if deadline is not None and deadline - monotonic() <= 0:
return True
return False
def _check_raise_for_error(self, cell: NotebookNode, exec_reply: t.Optional[t.Dict]) -> None:
    """Raise ``CellExecutionError`` for an error reply unless the cell tolerates it.

    A cell tolerates errors when ``self.allow_errors`` is set, the error name
    is in ``self.allow_error_names``, or the cell carries the
    ``raises-exception`` tag — unless ``self.force_raise_errors`` overrides.
    """
    if exec_reply is None:
        return None
    reply_content = exec_reply['content']
    if reply_content['status'] != 'error':
        return None
    tolerated = (
        self.allow_errors
        or reply_content.get('ename') in self.allow_error_names
        or "raises-exception" in cell.metadata.get("tags", [])
    )
    if self.force_raise_errors or not tolerated:
        raise CellExecutionError.from_cell_and_msg(cell, reply_content)
async def async_execute_cell(
    self,
    cell: NotebookNode,
    cell_index: int,
    execution_count: t.Optional[int] = None,
    store_history: bool = True,
) -> NotebookNode:
    """
    Executes a single code cell.

    To execute all cells see :meth:`execute`.

    Parameters
    ----------
    cell : nbformat.NotebookNode
        The cell which is currently being processed.
    cell_index : int
        The position of the cell within the notebook object.
    execution_count : int
        The execution count to be assigned to the cell (default: Use kernel response)
    store_history : bool
        Determines if history should be stored in the kernel (default: False).
        Specific to ipython kernels, which can store command histories.

    Raises
    ------
    CellExecutionError
        If execution failed and should raise an exception, this will be raised
        with defaults about the failure.

    Returns
    -------
    cell : NotebookNode
        The cell which was just processed.
    """
    assert self.kc is not None
    # Only actual code cells with non-empty source are executed.
    if cell.cell_type != 'code' or not cell.source.strip():
        self.log.debug("Skipping non-executing cell %s", cell_index)
        return cell

    if self.skip_cells_with_tag in cell.metadata.get("tags", []):
        self.log.debug("Skipping tagged cell %s", cell_index)
        return cell

    if self.record_timing and 'execution' not in cell['metadata']:
        cell['metadata']['execution'] = {}

    self.log.debug("Executing cell:\n%s", cell.source)

    cell_allows_errors = (not self.force_raise_errors) and (
        self.allow_errors or "raises-exception" in cell.metadata.get("tags", [])
    )

    parent_msg_id = await ensure_async(
        self.kc.execute(
            cell.source, store_history=store_history, stop_on_error=not cell_allows_errors
        )
    )
    # We launched a code cell to execute
    self.code_cells_executed += 1
    exec_timeout = self._get_timeout(cell)

    cell.outputs = []
    self.clear_before_next_output = False

    # Three concurrent tasks: watch kernel liveness, drain IOPub output,
    # and await the shell reply (which coordinates the other two).
    task_poll_kernel_alive = asyncio.ensure_future(self._async_poll_kernel_alive())
    task_poll_output_msg = asyncio.ensure_future(
        self._async_poll_output_msg(parent_msg_id, cell, cell_index)
    )
    self.task_poll_for_reply = asyncio.ensure_future(
        self._async_poll_for_reply(
            parent_msg_id, cell, exec_timeout, task_poll_output_msg, task_poll_kernel_alive
        )
    )
    try:
        exec_reply = await self.task_poll_for_reply
    except asyncio.CancelledError:
        # can only be cancelled by task_poll_kernel_alive when the kernel is dead
        task_poll_output_msg.cancel()
        raise DeadKernelError("Kernel died")
    except Exception as e:
        # Best effort to cancel request if it hasn't been resolved
        try:
            # Check if the task_poll_output is doing the raising for us
            if not isinstance(e, CellControlSignal):
                task_poll_output_msg.cancel()
        finally:
            raise

    if execution_count:
        cell['execution_count'] = execution_count
    self._check_raise_for_error(cell, exec_reply)
    self.nb['cells'][cell_index] = cell
    return cell

execute_cell = run_sync(async_execute_cell)
def process_message(
    self, msg: t.Dict, cell: NotebookNode, cell_index: int
) -> t.Optional[t.List]:
    """
    Processes a kernel message, updates cell state, and returns the
    resulting output object that was appended to cell.outputs.

    The input argument *cell* is modified in-place.

    Parameters
    ----------
    msg : dict
        The kernel message being processed.
    cell : nbformat.NotebookNode
        The cell which is currently being processed.
    cell_index : int
        The position of the cell within the notebook object.

    Returns
    -------
    output : dict
        The execution output payload (or None for no output).

    Raises
    ------
    CellExecutionComplete
        Once a message arrives which indicates computation completeness.
    """
    msg_type = msg['msg_type']
    self.log.debug("msg_type: %s", msg_type)
    content = msg['content']
    self.log.debug("content: %s", content)

    # Route display updates to any previously recorded outputs sharing
    # the same display_id.
    display_id = content.get('transient', {}).get('display_id', None)
    if display_id and msg_type in {'execute_result', 'display_data', 'update_display_data'}:
        self._update_display_id(display_id, msg)

    # set the prompt number for the input and the output
    if 'execution_count' in content:
        cell['execution_count'] = content['execution_count']

    if self.record_timing:
        # Stamp busy/idle/execute_input transitions into cell metadata.
        if msg_type == 'status':
            if content['execution_state'] == 'idle':
                cell['metadata']['execution']['iopub.status.idle'] = timestamp()
            elif content['execution_state'] == 'busy':
                cell['metadata']['execution']['iopub.status.busy'] = timestamp()
        elif msg_type == 'execute_input':
            cell['metadata']['execution']['iopub.execute_input'] = timestamp()

    if msg_type == 'status':
        if content['execution_state'] == 'idle':
            # Idle after our request means the cell finished executing.
            raise CellExecutionComplete()
    elif msg_type == 'clear_output':
        self.clear_output(cell.outputs, msg, cell_index)
    elif msg_type.startswith('comm'):
        self.handle_comm_msg(cell.outputs, msg, cell_index)
    # Check for remaining messages we don't process
    elif msg_type not in ['execute_input', 'update_display_data']:
        # Assign output as our processed "result"
        return self.output(cell.outputs, msg, display_id, cell_index)
    return None
def output(
    self, outs: t.List, msg: t.Dict, display_id: str, cell_index: int
) -> t.Optional[t.List]:
    """Append the output carried by ``msg`` to ``outs`` (the cell's outputs).

    Delegates to a registered output hook when one exists for the message's
    parent, applies a pending delayed ``clear_output``, and records the
    output's position for later ``display_id`` updates.
    """
    msg_type = msg['msg_type']
    parent_msg_id = msg['parent_header'].get('msg_id')
    if self.output_hook_stack[parent_msg_id]:
        # if we have a hook registered, it will overrride our
        # default output behaviour (e.g. OutputWidget)
        hook = self.output_hook_stack[parent_msg_id][-1]
        hook.output(outs, msg, display_id, cell_index)
        return None
    try:
        out = output_from_msg(msg)
    except ValueError:
        self.log.error("unhandled iopub msg: " + msg_type)
        return None

    if self.clear_before_next_output:
        # A clear_output(wait=True) was seen earlier; apply it now that new
        # output has arrived.
        self.log.debug('Executing delayed clear_output')
        outs[:] = []
        self.clear_display_id_mapping(cell_index)
        self.clear_before_next_output = False

    if display_id:
        # record output index in:
        # _display_id_map[display_id][cell_idx]
        cell_map = self._display_id_map.setdefault(display_id, {})
        output_idx_list = cell_map.setdefault(cell_index, [])
        output_idx_list.append(len(outs))

    outs.append(out)
    return out
def clear_output(self, outs: t.List, msg: t.Dict, cell_index: int) -> None:
    """Handle a ``clear_output`` message, honoring ``wait`` and output hooks."""
    content = msg['content']
    parent_msg_id = msg['parent_header'].get('msg_id')
    hooks = self.output_hook_stack[parent_msg_id]
    if hooks:
        # A registered hook (e.g. an OutputWidget) overrides the default
        # clear_output behaviour; the most recently registered one wins.
        hooks[-1].clear_output(outs, msg, cell_index)
        return
    if content.get('wait'):
        # Defer the clear until the next output arrives.
        self.log.debug('Wait to clear output')
        self.clear_before_next_output = True
    else:
        self.log.debug('Immediate clear output')
        outs[:] = []
        self.clear_display_id_mapping(cell_index)
def clear_display_id_mapping(self, cell_index: int) -> None:
    """Forget recorded output positions for ``cell_index`` in every display id."""
    for cell_map in self._display_id_map.values():
        if cell_index in cell_map:
            cell_map[cell_index] = []
def handle_comm_msg(self, outs: t.List, msg: t.Dict, cell_index: int) -> None:
    """Track widget state/buffers from comm messages and dispatch to comm objects.

    Collects widget state updates (when ``self.store_widget_state`` is set)
    and binary buffers, then routes ``comm_open``/``comm_msg`` messages to
    any registered frontend-mimicking comm objects (e.g. Output widgets).
    """
    content = msg['content']
    data = content['data']
    if self.store_widget_state and 'state' in data:  # ignore custom msg'es
        self.widget_state.setdefault(content['comm_id'], {}).update(data['state'])
        if 'buffer_paths' in data and data['buffer_paths']:
            comm_id = content['comm_id']
            if comm_id not in self.widget_buffers:
                self.widget_buffers[comm_id] = {}
            # for each comm, the path uniquely identifies a buffer
            new_buffers: t.Dict[t.Tuple[str, ...], t.Dict[str, str]] = {
                tuple(k["path"]): k for k in self._get_buffer_data(msg)
            }
            self.widget_buffers[comm_id].update(new_buffers)
    # There are cases where we need to mimic a frontend, to get similar behaviour as
    # when using the Output widget from Jupyter lab/notebook
    if msg['msg_type'] == 'comm_open':
        target = msg['content'].get('target_name')
        handler = self.comm_open_handlers.get(target)
        if handler:
            comm_id = msg['content']['comm_id']
            comm_object = handler(msg)
            if comm_object:
                self.comm_objects[comm_id] = comm_object
        else:
            self.log.warning(f'No handler found for comm target {target!r}')
    elif msg['msg_type'] == 'comm_msg':
        content = msg['content']
        comm_id = msg['content']['comm_id']
        # Forward to the mimicked frontend object, if one was opened.
        if comm_id in self.comm_objects:
            self.comm_objects[comm_id].handle_msg(msg)
def _serialize_widget_state(self, state: t.Dict) -> t.Dict[str, t.Any]:
"""Serialize a widget state, following format in @jupyter-widgets/schema."""
return {
'model_name': state.get('_model_name'),
'model_module': state.get('_model_module'),
'model_module_version': state.get('_model_module_version'),
'state': state,
}
def _get_buffer_data(self, msg: t.Dict) -> t.List[t.Dict[str, str]]:
encoded_buffers = []
paths = msg['content']['data']['buffer_paths']
buffers = msg['buffers']
for path, buffer in zip(paths, buffers):
encoded_buffers.append(
{
'data': base64.b64encode(buffer).decode('utf-8'),
'encoding': 'base64',
'path': path,
}
)
return encoded_buffers
def register_output_hook(self, msg_id: str, hook: OutputWidget) -> None:
    """Registers an override object that handles output/clear_output instead.

    Multiple hooks can be registered, where the last one will be used (stack based)
    """
    # mimics
    # https://jupyterlab.github.io/jupyterlab/services/interfaces/kernel.ikernelconnection.html#registermessagehook
    self.output_hook_stack[msg_id].append(hook)
def remove_output_hook(self, msg_id: str, hook: OutputWidget) -> None:
    """Unregisters an override object that handles output/clear_output instead"""
    # mimics
    # https://jupyterlab.github.io/jupyterlab/services/interfaces/kernel.ikernelconnection.html#removemessagehook
    # Hooks are stack-ordered, so the hook being removed must be the most
    # recently registered one for this msg_id.
    removed_hook = self.output_hook_stack[msg_id].pop()
    assert removed_hook == hook
def on_comm_open_jupyter_widget(self, msg: t.Dict):
    """Instantiate a mimicked frontend widget for a ``comm_open`` message.

    Looks up the widget class in ``self.widget_registry`` by model module
    and model name; returns a new instance, or None when unknown.
    """
    content = msg['content']
    state = content['data']['state']
    module = self.widget_registry.get(state['_model_module'])
    if not module:
        return None
    widget_class = module.get(state['_model_name'])
    if not widget_class:
        return None
    return widget_class(content['comm_id'], state, self.kc, self)
def execute(
    nb: NotebookNode, cwd: t.Optional[str] = None, km: t.Optional[KernelManager] = None, **kwargs
) -> NotebookNode:
    """Execute a notebook's code, updating outputs within the notebook object.

    This is a convenient wrapper around NotebookClient. It returns the
    modified notebook object.

    Parameters
    ----------
    nb : NotebookNode
        The notebook object to be executed
    cwd : str, optional
        If supplied, the kernel will run in this directory
    km : AsyncKernelManager, optional
        If supplied, the specified kernel manager will be used for code execution.
    kwargs :
        Any other options for NotebookClient, e.g. timeout, kernel_name

    Returns
    -------
    nb : NotebookNode
        The executed notebook.  (The return annotation previously claimed
        ``NotebookClient``, but ``NotebookClient.execute()`` returns the
        notebook node, which is what this function passes through.)
    """
    resources = {}
    if cwd is not None:
        resources['metadata'] = {'path': cwd}
    return NotebookClient(nb=nb, resources=resources, km=km, **kwargs).execute()
nbclient-0.5.6/nbclient/exceptions.py 0000664 0000000 0000000 00000006106 14143421273 0017655 0 ustar 00root root 0000000 0000000 from typing import Dict
from nbformat import NotebookNode
class CellControlSignal(Exception):
    """
    A custom exception used to indicate that the exception is used for cell
    control actions (not the best model, but it's needed to cover existing
    behavior without major refactors).

    Subclassed by CellTimeoutError, CellExecutionComplete and
    CellExecutionError.
    """

    pass
class CellTimeoutError(TimeoutError, CellControlSignal):
    """
    A custom exception to capture when a cell has timed out during execution.
    """

    @classmethod
    def error_from_timeout_and_cell(cls, msg: str, timeout: int, cell: NotebookNode):
        """Build a CellTimeoutError whose message embeds a preview of ``cell``.

        Sources of 11+ lines are abbreviated to their first and last five
        lines separated by ``...``.
        """
        if cell and cell.source:
            src_by_lines = cell.source.strip().split("\n")
            if len(src_by_lines) < 11:
                src = cell.source
            else:
                # Join the line slices back into text; formatting the list
                # slices directly would embed their Python reprs
                # (e.g. "['a', 'b']") in the error message.
                src = "\n".join(src_by_lines[:5]) + "\n...\n" + "\n".join(src_by_lines[-5:])
        else:
            src = "Cell contents not found."
        return cls(timeout_err_msg.format(timeout=timeout, msg=msg, cell_contents=src))
class DeadKernelError(RuntimeError):
    """Raised when the kernel process is found dead while awaiting a reply."""

    pass
class CellExecutionComplete(CellControlSignal):
    """
    Used as a control signal for cell execution across execute_cell and
    process_message function calls. Raised when all execution requests
    are completed and no further messages are expected from the kernel
    over zeromq channels.
    """

    pass
class CellExecutionError(CellControlSignal):
    """
    Custom exception to propagate exceptions that are raised during
    notebook execution to the caller. This is mostly useful when
    using nbconvert as a library, since it allows to deal with
    failures gracefully.
    """

    def __init__(self, traceback: str, ename: str, evalue: str) -> None:
        """Store the formatted traceback plus the kernel's error name/value."""
        super().__init__(traceback)
        self.traceback = traceback
        self.ename = ename
        self.evalue = evalue

    def __reduce__(self) -> tuple:
        # Make the exception picklable (e.g. across process boundaries).
        return type(self), (self.traceback, self.ename, self.evalue)

    def __str__(self) -> str:
        s = self.__unicode__()
        if not isinstance(s, str):
            # Defensive legacy path for non-str tracebacks.
            s = s.encode('utf8', 'replace')
        return s

    def __unicode__(self) -> str:
        return self.traceback

    @classmethod
    def from_cell_and_msg(cls, cell: NotebookNode, msg: Dict):
        """Instantiate from a code cell object and a message contents
        (message is either execute_reply or error)
        """
        # The kernel supplies the traceback as a list of lines.
        tb = '\n'.join(msg.get('traceback', []) or [])
        return cls(
            exec_err_msg.format(
                cell=cell,
                traceback=tb,
                ename=msg.get('ename', ''),
                evalue=msg.get('evalue', ''),
            ),
            ename=msg.get('ename', ''),
            evalue=msg.get('evalue', ''),
        )
# Message template used by CellExecutionError.from_cell_and_msg; formatted
# with the failing cell, the joined kernel traceback, and ename/evalue.
exec_err_msg: str = """\
An error occurred while executing the following cell:
------------------
{cell.source}
------------------
{traceback}
{ename}: {evalue}
"""
timeout_err_msg: str = """\
A cell timed out while it was being executed, after {timeout} seconds.
The message was: {msg}.
Here is a preview of the cell contents:
-------------------
{cell_contents}
-------------------
"""
nbclient-0.5.6/nbclient/jsonutil.py 0000664 0000000 0000000 00000014706 14143421273 0017350 0 ustar 00root root 0000000 0000000 """Utilities to manipulate JSON objects."""
# NOTE: this is a copy of ipykernel/jsonutils.py (+blackified)
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import math
import numbers
import re
import types
from binascii import b2a_base64
from datetime import datetime
from typing import Dict
from ipython_genutils import py3compat
from ipython_genutils.py3compat import iteritems, unicode_type
# Name of the iterator-advance method ('__next__' on py3, 'next' on py2);
# used by json_clean to detect iterator objects.
next_attr_name = '__next__' if py3compat.PY3 else 'next'

# -----------------------------------------------------------------------------
# Globals and constants
# -----------------------------------------------------------------------------

# timestamp formats
ISO8601 = "%Y-%m-%dT%H:%M:%S.%f"
ISO8601_PAT = re.compile(
    r"^(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})(\.\d{1,6})?Z?([\+\-]\d{2}:?\d{2})?$"
)

# holy crap, strptime is not threadsafe.
# Calling it once at import seems to help.
datetime.strptime("1", "%d")

# -----------------------------------------------------------------------------
# Classes and functions
# -----------------------------------------------------------------------------

# constants for identifying png/jpeg data
PNG = b'\x89PNG\r\n\x1a\n'
# front of PNG base64-encoded
PNG64 = b'iVBORw0KG'
JPEG = b'\xff\xd8'
# front of JPEG base64-encoded
JPEG64 = b'/9'
# constants for identifying gif data
GIF_64 = b'R0lGODdh'
GIF89_64 = b'R0lGODlh'
# front of PDF base64-encoded
PDF64 = b'JVBER'
def encode_images(format_dict: Dict) -> Dict[str, str]:
    """b64-encode any binary image/pdf payloads in a display-data dict.

    Parameters
    ----------
    format_dict : dict
        A dictionary of display data keyed by mime-type

    Returns
    -------
    format_dict : dict
        A copy of the same dictionary,
        but binary image data ('image/png', 'image/jpeg' or 'application/pdf')
        is base64-encoded.
    """
    # On Python 3, bytes objects always represent binary data and arrive
    # base64-encoded already, so nothing needs to be done.
    if py3compat.PY3:
        return format_dict

    encoded = format_dict.copy()
    # (mime-type, base64 prefix) pairs; the prefix check prevents
    # double-encoding data that is already base64.
    specs = (
        ('image/png', PNG64),
        ('image/jpeg', JPEG64),
        ('image/gif', (GIF_64, GIF89_64)),
        ('application/pdf', PDF64),
    )
    for mime, b64_prefix in specs:
        payload = format_dict.get(mime)
        if isinstance(payload, bytes):
            if not payload.startswith(b64_prefix):
                payload = b2a_base64(payload)
            encoded[mime] = payload.decode('ascii')
    return encoded
def json_clean(obj):
    """Clean an object to ensure it's safe to encode in JSON.

    Atomic, immutable objects are returned unmodified. Sets and tuples are
    converted to lists, lists are copied and dicts are also copied.

    Note: dicts whose keys could cause collisions upon encoding (such as a dict
    with both the number 1 and the string '1' as keys) will cause a ValueError
    to be raised.

    Parameters
    ----------
    obj : any python object

    Returns
    -------
    out : object
        A version of the input which will not cause an encoding error when
        encoded as JSON. Note that this function does not *encode* its inputs,
        it simply sanitizes it so that there will be no encoding errors later.
    """
    # types that are 'atomic' and ok in json as-is.
    atomic_ok = (unicode_type, type(None))

    # containers that we need to convert into lists
    container_to_list = (tuple, set, types.GeneratorType)

    # Since bools are a subtype of Integrals, which are a subtype of Reals,
    # we have to check them in that order.

    if isinstance(obj, bool):
        return obj

    if isinstance(obj, numbers.Integral):
        # cast int to int, in case subclasses override __str__ (e.g. boost enum, #4598)
        return int(obj)

    if isinstance(obj, numbers.Real):
        # cast out-of-range floats to their reprs (JSON has no nan/inf)
        if math.isnan(obj) or math.isinf(obj):
            return repr(obj)
        return float(obj)

    if isinstance(obj, atomic_ok):
        return obj

    if isinstance(obj, bytes):
        if py3compat.PY3:
            # unanmbiguous binary data is base64-encoded
            # (this probably should have happened upstream)
            return b2a_base64(obj).decode('ascii')
        else:
            # Python 2 bytestr is ambiguous,
            # needs special handling for possible binary bytestrings.
            # imperfect workaround: if ascii, assume text.
            # otherwise assume binary, base64-encode (py3 behavior).
            try:
                return obj.decode('ascii')
            except UnicodeDecodeError:
                return b2a_base64(obj).decode('ascii')

    if isinstance(obj, container_to_list) or (
        hasattr(obj, '__iter__') and hasattr(obj, next_attr_name)
    ):
        # Materialize tuples/sets/generators/iterators so they can be
        # cleaned recursively as lists below.
        obj = list(obj)

    if isinstance(obj, list):
        return [json_clean(x) for x in obj]

    if isinstance(obj, dict):
        # First, validate that the dict won't lose data in conversion due to
        # key collisions after stringification. This can happen with keys like
        # True and 'true' or 1 and '1', which collide in JSON.
        nkeys = len(obj)
        nkeys_collapsed = len(set(map(unicode_type, obj)))
        if nkeys != nkeys_collapsed:
            raise ValueError(
                'dict cannot be safely converted to JSON: '
                'key collision would lead to dropped values'
            )
        # If all OK, proceed by making the new dict that will be json-safe
        out = {}
        for k, v in iteritems(obj):
            out[unicode_type(k)] = json_clean(v)
        return out

    if isinstance(obj, datetime):
        return obj.strftime(ISO8601)

    # we don't understand it, it's probably an unserializable object
    raise ValueError("Can't clean for JSON: %r" % obj)
nbclient-0.5.6/nbclient/output_widget.py 0000664 0000000 0000000 00000007656 14143421273 0020412 0 ustar 00root root 0000000 0000000 from typing import Any, Dict, List, Optional
from jupyter_client.client import KernelClient
from nbformat.v4 import output_from_msg
from .jsonutil import json_clean
class OutputWidget:
    """This class mimics a front end output widget

    It mirrors the widget's ``outputs`` state locally, applies
    output/clear_output messages routed to it by the executor, and syncs the
    updated state back to the kernel over the comm channel.
    """

    def __init__(
        self, comm_id: str, state: Dict[str, Any], kernel_client: KernelClient, executor
    ) -> None:
        # Identity of the comm this widget mirrors.
        self.comm_id: str = comm_id
        # Full widget state as received from the kernel; 'outputs' is
        # mirrored separately below.
        self.state: Dict[str, Any] = state
        self.kernel_client: KernelClient = kernel_client
        self.executor = executor
        # IOPub topic for this comm, matching frontend conventions.
        self.topic: bytes = ('comm-%s' % self.comm_id).encode('ascii')
        self.outputs: List = self.state['outputs']
        # Set when clear_output(wait=True) should be applied on next output.
        self.clear_before_next_output: bool = False

    def clear_output(self, outs: List, msg: Dict, cell_index: int) -> None:
        """Handle a clear_output message routed to this widget.

        With ``wait=True`` the clear is deferred until the next output;
        otherwise outputs are dropped immediately and the state is synced.
        """
        self.parent_header = msg['parent_header']
        content = msg['content']
        if content.get('wait'):
            self.clear_before_next_output = True
        else:
            self.outputs = []
            # sync back the state to the kernel
            self.sync_state()
            if hasattr(self.executor, 'widget_state'):
                # sync the state to the nbconvert state as well, since that is used for testing
                self.executor.widget_state[self.comm_id]['outputs'] = self.outputs

    def sync_state(self) -> None:
        """Send the widget's current outputs back to the kernel as an update."""
        state = {'outputs': self.outputs}
        msg = {'method': 'update', 'state': state, 'buffer_paths': []}
        self.send(msg)

    def _publish_msg(
        self,
        msg_type: str,
        data: Optional[Dict] = None,
        metadata: Optional[Dict] = None,
        buffers: Optional[List] = None,
        **keys
    ) -> None:
        """Helper for sending a comm message on IOPub

        NOTE(review): the ``buffers`` argument is accepted but not forwarded
        to the session message — confirm whether that is intentional.
        """
        data = {} if data is None else data
        metadata = {} if metadata is None else metadata
        content = json_clean(dict(data=data, comm_id=self.comm_id, **keys))
        msg = self.kernel_client.session.msg(
            msg_type, content=content, parent=self.parent_header, metadata=metadata
        )
        self.kernel_client.shell_channel.send(msg)

    def send(
        self,
        data: Optional[Dict] = None,
        metadata: Optional[Dict] = None,
        buffers: Optional[List] = None,
    ) -> None:
        """Send a ``comm_msg`` for this widget's comm."""
        self._publish_msg('comm_msg', data=data, metadata=metadata, buffers=buffers)

    def output(self, outs: List, msg: Dict, display_id: str, cell_index: int) -> None:
        """Append an output message to this widget (instead of the cell).

        Applies any pending deferred clear, coalesces consecutive stream
        outputs of the same name, and syncs the new state to the kernel.
        """
        if self.clear_before_next_output:
            self.outputs = []
            self.clear_before_next_output = False
        self.parent_header = msg['parent_header']
        output = output_from_msg(msg)

        if self.outputs:
            # try to coalesce/merge output text
            last_output = self.outputs[-1]
            if (
                last_output['output_type'] == 'stream'
                and output['output_type'] == 'stream'
                and last_output['name'] == output['name']
            ):
                last_output['text'] += output['text']
            else:
                self.outputs.append(output)
        else:
            self.outputs.append(output)
        self.sync_state()
        if hasattr(self.executor, 'widget_state'):
            # sync the state to the nbconvert state as well, since that is used for testing
            self.executor.widget_state[self.comm_id]['outputs'] = self.outputs

    def set_state(self, state: Dict) -> None:
        """Register/unregister this widget as the output hook for ``msg_id``.

        A truthy ``msg_id`` in the state captures subsequent output for that
        message; a falsy one releases the previously captured message id.
        """
        if 'msg_id' in state:
            msg_id = state.get('msg_id')
            if msg_id:
                self.executor.register_output_hook(msg_id, self)
                self.msg_id = msg_id
            else:
                self.executor.remove_output_hook(self.msg_id, self)
                self.msg_id = msg_id

    def handle_msg(self, msg: Dict) -> None:
        """Process a ``comm_msg`` addressed to this widget's comm."""
        content = msg['content']
        comm_id = content['comm_id']
        assert comm_id == self.comm_id
        data = content['data']
        if 'state' in data:
            self.set_state(data['state'])
nbclient-0.5.6/nbclient/tests/ 0000775 0000000 0000000 00000000000 14143421273 0016261 5 ustar 00root root 0000000 0000000 nbclient-0.5.6/nbclient/tests/__init__.py 0000664 0000000 0000000 00000000000 14143421273 0020360 0 ustar 00root root 0000000 0000000 nbclient-0.5.6/nbclient/tests/base.py 0000664 0000000 0000000 00000003636 14143421273 0017555 0 ustar 00root root 0000000 0000000 import unittest
from nbformat import v4 as nbformat
class NBClientTestsBase(unittest.TestCase):
    """Shared fixture helpers for NotebookClient test cases."""

    def build_notebook(self, with_json_outputs=False):
        """Build a notebook in memory for use with NotebookClient tests.

        The notebook contains a single code cell carrying a fixed sequence of
        stream and display_data outputs (extended with application/json
        outputs when ``with_json_outputs`` is true), followed by one markdown
        cell.
        """
        collected = [
            nbformat.new_output("stream", name="stdout", text="a"),
            nbformat.new_output("display_data", data={'text/plain': 'b'}),
            nbformat.new_output("stream", name="stdout", text="c"),
            nbformat.new_output("stream", name="stdout", text="d"),
            nbformat.new_output("stream", name="stderr", text="e"),
            nbformat.new_output("stream", name="stderr", text="f"),
            nbformat.new_output("display_data", data={'image/png': 'Zw=='}),  # g
            nbformat.new_output("display_data", data={'application/pdf': 'aA=='}),  # h
        ]
        if with_json_outputs:
            json_payloads = [
                [1, 2, 3],  # j
                {'a': 1, 'c': {'b': 2}},  # k
                'abc',  # l
                15.03,  # m
            ]
            collected += [
                nbformat.new_output("display_data", data={'application/json': payload})
                for payload in json_payloads
            ]
        return nbformat.new_notebook(
            cells=[
                nbformat.new_code_cell(source="$ e $", execution_count=1, outputs=collected),
                nbformat.new_markdown_cell(source="$ e $"),
            ]
        )

    def build_resources(self):
        """Build an empty resources dictionary."""
        return {'metadata': {}}

    @classmethod
    def merge_dicts(cls, *dict_args):
        """Merge the given dicts into one new dict; later keys win."""
        return {key: value for mapping in dict_args for key, value in mapping.items()}
nbclient-0.5.6/nbclient/tests/conftest.py 0000664 0000000 0000000 00000000406 14143421273 0020460 0 ustar 00root root 0000000 0000000 import os
# Pin the cell name that ipykernel reports in outputs to a fixed value so
# that test output is reproducible (ipykernel would otherwise embed randomly
# generated file names).
# See: https://github.com/ipython/ipykernel/blob/360685c6/ipykernel/compiler.py#L50-L55
os.environ.update(IPYKERNEL_CELL_NAME="")
nbclient-0.5.6/nbclient/tests/fake_kernelmanager.py 0000664 0000000 0000000 00000001434 14143421273 0022436 0 ustar 00root root 0000000 0000000 from jupyter_client.manager import AsyncKernelManager
class FakeCustomKernelManager(AsyncKernelManager):
    """AsyncKernelManager stand-in that counts calls to selected methods.

    Tests inspect ``expected_methods`` to verify that a custom kernel
    manager class is actually exercised.
    """

    # Class-level call counters, shared across all instances.
    expected_methods = {'__init__': 0, 'client': 0, 'start_kernel': 0}

    def _record(self, method: str, message: str) -> None:
        # Log the event and bump the shared counter for ``method``.
        self.log.info(message)
        self.expected_methods[method] += 1

    def __init__(self, *args, **kwargs):
        self._record('__init__', 'FakeCustomKernelManager initialized')
        super().__init__(*args, **kwargs)

    async def start_kernel(self, *args, **kwargs):
        self._record('start_kernel', 'FakeCustomKernelManager started a kernel')
        return await super().start_kernel(*args, **kwargs)

    def client(self, *args, **kwargs):
        self._record('client', 'FakeCustomKernelManager created a client')
        return super().client(*args, **kwargs)
nbclient-0.5.6/nbclient/tests/files/ 0000775 0000000 0000000 00000000000 14143421273 0017363 5 ustar 00root root 0000000 0000000 nbclient-0.5.6/nbclient/tests/files/Autokill.ipynb 0000664 0000000 0000000 00000001227 14143421273 0022214 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import signal\n",
"pid = os.getpid()\n",
"os.kill(pid, signal.SIGTERM)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.3"
}
},
"nbformat": 4,
"nbformat_minor": 4
}
nbclient-0.5.6/nbclient/tests/files/Check History in Memory.ipynb 0000664 0000000 0000000 00000000702 14143421273 0024704 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"from IPython import get_ipython"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"ip = get_ipython()\n",
"assert ip.history_manager.hist_file == ':memory:'"
]
}
],
"metadata": {},
"nbformat": 4,
"nbformat_minor": 2
}
nbclient-0.5.6/nbclient/tests/files/Clear Output.ipynb 0000664 0000000 0000000 00000007222 14143421273 0022740 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"from __future__ import print_function\n",
"from IPython.display import clear_output"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"9\n"
]
}
],
"source": [
"for i in range(10):\n",
" clear_output()\n",
" print(i)"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"print(\"Hello world\")\n",
"clear_output()"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Hello world"
]
}
],
"source": [
"print(\"Hello world\", end='')\n",
"clear_output(wait=True) # no output after this"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"world"
]
}
],
"source": [
"print(\"Hello\", end='')\n",
"clear_output(wait=True) # here we have new output after wait=True\n",
"print(\"world\", end='')"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'Hello world'"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"handle0 = display(\"Hello world\", display_id=\"id0\")"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'world'"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"handle1 = display(\"Hello\", display_id=\"id1\")"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"handle1.update('world')"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"handle2 = display(\"Hello world\", display_id=\"id2\")\n",
"clear_output() # clears all output, also with display_ids"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'Hello world'"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"handle3 = display(\"Hello world\", display_id=\"id3\")\n",
"clear_output(wait=True)"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"world"
]
}
],
"source": [
"handle4 = display(\"Hello\", display_id=\"id4\")\n",
"clear_output(wait=True)\n",
"print('world', end='')"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [],
"source": [
"handle4.update('Hello world') # it is cleared, so it should not show up in the above cell"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.7"
}
},
"nbformat": 4,
"nbformat_minor": 1
}
nbclient-0.5.6/nbclient/tests/files/Disable Stdin.ipynb 0000664 0000000 0000000 00000000523 14143421273 0023033 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"try:\n",
" input = raw_input\n",
"except:\n",
" pass\n",
"\n",
"name = input(\"name: \")"
]
}
],
"metadata": {},
"nbformat": 4,
"nbformat_minor": 0
}
nbclient-0.5.6/nbclient/tests/files/Empty Cell.ipynb 0000664 0000000 0000000 00000002413 14143421273 0022364 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Test that executing skips over an empty cell."
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'Code 1'"
]
},
"execution_count": 1,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"\"Code 1\""
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'Code 2'"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"\"Code 2\""
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.2"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
nbclient-0.5.6/nbclient/tests/files/Error.ipynb 0000664 0000000 0000000 00000002413 14143421273 0021517 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "d200673b",
"metadata": {},
"outputs": [
{
"ename": "ZeroDivisionError",
"evalue": "division by zero",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mZeroDivisionError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m/tmp/ipykernel_1277493/182040962.py\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0;36m0\u001b[0m\u001b[0;34m/\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;31mZeroDivisionError\u001b[0m: division by zero"
]
}
],
"source": [
"0/0"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.5"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
nbclient-0.5.6/nbclient/tests/files/Factorials.ipynb 0000664 0000000 0000000 00000001247 14143421273 0022521 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"i, j = 1, 1"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"2\n",
"3\n",
"5\n",
"8\n",
"13\n",
"21\n",
"34\n",
"55\n",
"89\n",
"144\n"
]
}
],
"source": [
"for m in range(10):\n",
" i, j = j, i + j\n",
" print(j)"
]
}
],
"metadata": {},
"nbformat": 4,
"nbformat_minor": 0
}
nbclient-0.5.6/nbclient/tests/files/HelloWorld.ipynb 0000664 0000000 0000000 00000000545 14143421273 0022505 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Hello World\n"
]
}
],
"source": [
"print(\"Hello World\")"
]
}
],
"metadata": {},
"nbformat": 4,
"nbformat_minor": 0
}
nbclient-0.5.6/nbclient/tests/files/Inline Image.ipynb 0000664 0000000 0000000 00000033775 14143421273 0022666 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"from IPython.display import Image"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": false
},
"outputs": [
{
"data": {
"image/png": [
"iVBORw0KGgoAAAANSUhEUgAAAMgAAABQCAYAAABcbTqwAAAABHNCSVQICAgIfAhkiAAAABl0RVh0\n",
"U29mdHdhcmUAd3d3Lmlua3NjYXBlLm9yZ5vuPBoAACAASURBVHic7Z15eJNV2vDvJ0/2vUmapk3b\n",
"dG9pKYWyFARkd6GojOI2vs6HfjqKI17i+LnNfM68LpfMq4KjM6/44jbgqKCfKLIoUFrWUigglO5N\n",
"1yxttmZfnzzP90dImrZpmqY75nddVZJnO0nOfc597u0gBEFUQ4wYMUJCmuwGxIgxlYkJSIwYYYgJ\n",
"SIwYYYgJSIwYYYgJSIxh8Xg8SHV1Na+srEw42W2ZaMiT3YAYU5vLly9zdu7cmeXxeEh5eXnGVatW\n",
"6Se7TRPJlBGQbqONcr5Rzdaa7BSDxUExWl1ko81JNtmcFLPNRTbZnWTMi5PYdCrGYVAxLouK8ehU\n",
"jMemYVwmDeOz6Vg8j+lZXJBqEXAY2GR/nhsFq9VK8Xg8v1pNY1IF5Gy9in2yVsG72NLD69QYGQAI\n",
"ABAABMD1/wx4TYDR6qQYrQ4KaAGACDoHfK9JCEIUyESWFUXpvQ+sKNSjKImAGDGiZFIExGBxkl/b\n",
"U5laWa+O6xMEJMwVkfdxnCCQmnYNt6ZNwz14vkn0xsaVbRlJQteoGnyDg+M4kEi/2kkiLBP+rdR1\n",
"GRgPvnMov7JBHTfez5KrDKyN/7Uv/0h1M2+8nzXdwDAMOX/+PO/9999P37lzp2yy2zNVmXAB+eDH\n",
"y1KjzUUZ+D4SbgIZBS7MS9r69ck0lc486Jm/ZiorK+N27tyZVVNTI3C5XOhkt2eqMqECclmuYV5q\n",
"1fQbzedlJ8C7jy6DY2/eB6/cVwLpCWM/2NscHvKrn5el4fiY3zrGDc6ECkhVcw8n+DWXQYV3//dy\n",
"WFKQDGw6Fe5amA2v/ceScXl2TVsPt6FTwxiXm8e4YZlQATFYHP3UnNIFGUCn9LcT5EgFUJgWPy7P\n",
"r2nvZo7LjWPcsEyoFavX6pzUdUB9h5YFAJPq6HI6nSStVkvV6XQUp9OJZmZm2sVisTvS6+VyOUMm\n",
"kznJZPKUMV/jOA4EQSAoio64TV6vF+ns7KSr1Wp6YmKiMzU11RnNfQB8hge1Wk1Tq9U0j8dD4vF4\n",
"nszMTDuDwYhauZ5QAbG7sH4z1o9VcnjytllAp/Y1o1FhgJp27bg8X2uyT4qAWiwW9OTJk8LKykpR\n",
"T0/PIDWPy+W6c3JyzBs3buyi0Wghf8y9e/cmXrp0SaDX6+nbtm37hcPheMM986233spWKpUsAIBN\n",
"mza1FBQUWAEADh8+HH/w4EGp1+sNmEVqa2v5Tz/99Gz/661bt9aw2ewh749hGFJWViZsbGzkajQa\n",
"ul6vpxEEARKJxDF//nzD7bffrhnObFxTU8P+/vvvpSqViolhff2CTCbjSUlJ9vXr1ysLCwutYW8C\n",
"PgE7fvy4sLy8PEGn09EIguhn7mEymdiaNWvUt956q5ZCoYxY8CbVUWh1euCZ/zkO9y/Ng7mZYjhy\n",
"uQP2nqqfzCaNOfv27ZP8/PPPiV6vN9AJ2Gy2RywWOx0OB6pWq5lms5laXV0t0mq19GeffbYlVOc8\n",
"fvx4YnCnHg6Xy4X6rVPB1+E4jmAYRiKIvr7ify+C2yLHjx8XHj58ONFoNNIGHlQqlSylUsmqra3l\n",
"bdmypWWoDrl///6EAwcOSP2dmU6nYxKJxKlUKpkej4fU2dnJ/uCDD3LWrl2rXL9+fU+oe+A4DqdO\n",
"nRIcOnQoyWAw0FAUJSQSiUMsFjvtdjtZrVYzrFYrxW63k3/44YeUioqKhOeff75RIpFEPFsDTIFQ\n",
"kyttWrjSph3sFZ/m4DgOO3bsSLt8+bIQAEAsFjvWrVunKiwstAQLgF6vp/z973/PUqvVzI6ODvbb\n",
"b7+d8+qrrzZEq2YMR2lpqaa0tFRz6tQpwa5du9IBAAoLC3ufeeaZ1uGubWho4DU0NPAQBCHmzp2r\n",
"z8/PN+Xl5dnsdjt69uxZQXl5uQQAoLm5mfvDDz9INmzYoB54j48//ji1qqoqHgBAIBC4Hnnkkbac\n",
"nBwbiUQCj8eD1NTUcHbv3p1mtVopBw8eTO7u7mY8+eST7cH3aG5uZn7++edpGo2GQSaT8aVLl/bc\n",
"dddd3TweLxBihGEYcuDAAfGhQ4ekBEEgJpOJumvXrtQXXnihZSTf16QLyI2Kx+Mh+YUDAOD111+v\n",
"C6V2CIVCz3PPPdf82muv5VssFopKpWKePn06btmyZYYJbXCEpKenWx588MGu9PR0R/D7aWlpSgRB\n",
"iOPHjycCAJSVlUnWrl2rYTKZgcGgvr6e5ReO+Ph4x4svvtgU3KkpFApRXFxs5vP5zdu3b891Op3o\n",
"xYsXhTU1NfrCwkKL/7yGhga2RuOzSBYXF+t/97vfKQa2k0wmE+vXr+9xOByov00tLS1cl8tFGkqN\n",
"DUUsvmCCCKeT8/l8bN68eQHjQXl5uXhCGjVCMjMzza+88krTQOHwc9NNNwWEGsMwUkdHB93/Gsdx\n",
"2LNnT4r/9e23395vxA8mIyPDUVpaqvS/3rt3bzIepRPrjjvuCKhoBEEgKpVqkGoYjpiATBEWLlwY\n",
"6FxKpZLV2Ng45UzSFAolbC+VyWRODofj8b9WqVQBAampqeH4jQZsNtuzaNGi3nD3WrJkiYFMJuMA\n",
"AN3d3cyLFy9G5UFms9leNpsdaBOO4yOK2ZhQFeuvDy7ssLuwrtBHh1e5d5VdTThwrmVKjq6jJSMj\n",
"w4GiKO5fzHd1dTFyc3Ptk92ukSKTyazXrl2LA/Cpmf73Ozs7A9a7pKQk+3Bmajab7U1LS7O2tLRw\n",
"AXzfx/z5803RtIlGo3mtVmtUFswJFZB4HnNUeRpsBjWsaXO6w2QyMYvFQgUAMBgM1MluTzRQqdSQ\n",
"s4xarQ4ICI/H84Q6ZyBcLjdwXnd3Nz3cuePFmAiIXG2inWpQczu1FnqnzkLXmx3UgBmRGJjXAX2v\n",
"g/8/6Lygk68fs9idYyPQ5vNs6Ngm6/9sAgAQAGqCC5jZdmDNsIFgpRmQ8bEmhYLFYgUExGg0TksB\n",
"GQqtVhvo4JEKSLC6ptFopp+AdGgt1A+P1CZV1CgFfQ6aAR18yI4fyXkhjo0FXjsJ3N30wQICAC4V\n",
"HSyXfPpu91dWyHqzFWiJEf2goyXYb2CxWG4oCyOFQgnM/h6PJ6J1QLCKNtTMNN5EvUjfc0YufHD7\n",
"sZnlNUohQYTNdpq+2BrYcG1jPlivTUiQo9VqDQhFsHpxIyASiQJJa2ZzZKkHwYOEUDg5SW9RCciR\n",
"Kwreeweuyrw4cWMKRjBeCxla30gDInIvdrQELyRFItGIPL5TnYSEhEAHj1R9DP4+EhISnOPRruEY\n",
"sYBUy7Ws17+5mIHfqLNGKJwdTFB8PK7WM41GQw1WKeLj44ccMV0u17Qzz6empgZ8J11dXSyHwxH2\n",
"M3i9XiR43ZKSkhLS9zLejPiL/vDnOqnbi0+7H2jUdO9JBNw9boPCyZMnA153CoWCFxUVmYOP0+n0\n",
"gAUw2GQaLURwMNYEUFhYaJFIJHYAnxOxurqaH+78y5cvc/0ziEQiccyZM8cc7vzxYkQdvUllol/r\n",
"6uUMf+ZomYLxWLgdhd4T3KgvD+MJtlqt6Llz50T+1yUlJdqB0bopKSkBn8iJEyeGTZgZGNU6EIFA\n",
"EFjjGI3GCYlyXrt2bSA269ChQ4lWq3XIVN+KiorA97Fu3TrVZBWVGNFT95yRj08m0wTBpJF9nc5r\n",
"jy4HW3806kITr7zySkF5eblgYESu3W5Ht23blmUymagAvtmjtLR0UATr7NmzA57nuro6/q5du5JD\n",
"qSk1NTXsN998M1elUoX1xMtksoDKolarmd3d3eNuVi4pKTGmpqZaAQB0Oh39ww8/TB84cOA4Dp9+\n",
"+mlKY2MjHwAgIyPDUlJSYhzvtg1FxKZEHAcor1MJxrMxoyKCSSdbKvJ1CnuUYRymaj4QRFQVJvR6\n",
"Pf3LL79M379/f7JUKrXHx8c7tVotXS6Xc/yh5kwmE9u0aVOLSCQaZMFatWqVXi6Xsy9cuCACADh1\n",
"6lTCpUuXBOnp6VaBQOA2Go0UhULBMhgMg2KNEAQZ9O1wOBzvihUrusvLyyUYhpG2b9+eM2fOHENB\n",
"QYG5oKDAOh4jNolEghdeeKF5x44dadeuXYtramrivfzyyzOzs7PN6enpNpVKxWhubuao1WomgC8Q\n",
"8bHHHusY84aMgIgFpF1rptmc2LS2zRekiX1qir2JFdUNcDsKjlYaMDNHbHLcsmVL4+HDhxMaGhr4\n",
"jY2NvMbGxn6xRUKh0Ll58+YWqVQ65L0fffTRToFA4C4rK5NgGEay2WwUf1iHH6lUaisuLu5tbGzk\n",
"NDU18QAAhsqou//++1Verxc5c+ZMvMFgoJWVlSWWlZUlRpKQFS00Gg3fvHlz6zfffJNUUVGRYDAY\n",
"aFVVVfH+KF8AACqV6l2zZo16qFyQiSTiDn+1XR9dp5oisBhkrChTYgOCAHC2Rx8IaL3GjEZA8vPz\n",
"rfn5+VaFQkFvaGhgaTQautVqJctkMltBQYE1OTl5WDMmmUwmNmzYoF6xYoWutraW7Q+/4PP5nri4\n",
"OI9MJnP403ffeeedwFoxOOQ8GBRFiYcfflixdu3anqamJlZPTw/N5XKhdDo9IFCFhYXmzZs3NwEA\n",
"cDicYUOF1q1b17148WI9AEBSUlLIz0QikeD+++9XbdiwQd3W1sZobm5mdXd305OSkhw5OTk2mUzm\n",
"CDeDLVy40OhXEYPXUkOxcePGdrfbTQrXpqGIWEDqlMYpF106Ev5w16IuNoOGg6GMB7gj+jpQtnoW\n",
"wF1hI1HDkZyc7IxEGMIhFAo9N998c9g26PX6gKrFYrHCzgZCodCzaNGikHo+n8/H+Hy+JdSxUKSk\n",
"pDhTUlIi+nwoihJZWVn2rKysEQVlxsfHu+Pj4yP2E+Xl5dlGcv9gIlY0WzWWaVsyZ26u1HjP0gID\n",
"eIwoKD4YXRVBe/OUHyhwHIfe3t6AgITLL48RnohnkF7r4GqIE8IoLb4rZqfr//TbZb4Q+863UwEz\n",
"je5zuDVTPohQp9NR/dYyiURiH6/03V8DEQuI1emZVgt0cRzLtemO+Yq1C3J8qoP6Xwlgqhy9FQ4z\n",
"T/nvoaurK+CBzs/PjyqHIoaPUQjI2AxKCAAkCdjO7CS+LUPCc6AoQvTdeuAzCICQ9hhfBDAZJREZ\n",
"SXHOWWliO5/D8KkVLjUFOramga0maidfP3AnCl4nCVD6lCxkajQayV9//XUqgM+8u2DBgqjXSzEi\n",
"FBCvF0cw79gGJgo5DPczpbO7FhckWTgM2tjryI52GpgrudD9b+moFuWh8OjIgCZPqWBCj8eDlJWV\n",
"iY4ePSoxm81UAIC77767KzMzc1JimG4UIhIQFCURdArqdXq8Y9LRVhel6l7aME8REAyXggb2eia4\n",
"2hm+qNng5KoBs0hw7sagxCovAq4OBthbmYDbyb773Jjqt8vlIp0+fTquo6ODqVQqmWq1mukPdqTT\n",
"6VhpaanqtttuG58KfL8iIlax2HTKqAUEAYA/318iXzc/w7cuMB6LA83uFPBaKP06/sCkqUGJVUP9\n",
"23/tOAsFRTSsPwBFUUIoFDr1ev24ZMKRyWRi3759Kf7icAiCEGKx2JGXl2dev369erwcfb82RiIg\n",
"mM7iHJUF586SjJ518zOMgJlR6P5nKliqBVMyMDEcJBoeyfqDTCYTW7durW1paWFWVo6BcWAAKIoS\n",
"t9xyixpFUSI9Pd2WmZnpGEm9pxiREbGACDk0d7vWErUPQCpkOZ9bX6wCAADl2+ngqJ+euz6hI8v0\n",
"i8YRFil33nnnpIdi3OhE7CjMSuSNarH3yr0L2ukUMg6GA6JpKxwAAIzQRdNiTF3MNgf69ZEq4b8O\n",
"nhENf3Z/Ip5B8qT866PgyFUiLpPqmZeZYAOvBQXt18kjvsFUgjsn4rCLGFODl//5rez0L81xty6a\n",
"qQMA3UiujXgGmZnCj1pNyE6K88XC2BuYQEzz/fC482MCMs3AvL5qimTSyLcEj1hAUkUcN4dOGb7w\n",
"W4gmzEiO8wmXs2VaRwQDysaAPTOmYk0zsOthNyg6jgICALA4LyEqr2xO0vXZxzX6XOpJRbDCMG7b\n",
"8cYYN7z+GSSKmLQRCci6ualRbV9GRq9ntBHTrxpHPxLuiTnepiF+FQsdTxULAGB+ltgm4TMmpT7R\n",
"pMPMsQIr79f52ac5/hkERUc+Po/4is23zxy0WcmvguTfqya7CTGiw4v7VayRzyAjDt1ePSvZVHFN\n",
"pTt6VTFim/K0hX+zHuKWjMp6pek1k89caeGotL3Ubr2JKhHy3HNnpFnn5MrsNEr4rQA0vWbyxfp2\n",
"Vk2LkuXBMKQwK9k2Ny/NJhXHDXJa2hwu0uGzV/kpCUJXycyMkJl0ap2Rsq/ikuC3ty7U8TmD03Fr\n",
"W5WMK01dzLuWzellMfp7501WB1p1Tc6ub1MzUhOFrtnZKfZ06dBF7vxgXhyplSsYV5q7mLWtKpbJ\n",
"aifftqjQsH558aB1bV2bil51Tc7xYF6kIENqn52Tah/YjoGYrA70fG0rq7q+neP2YIhUHOdaUJBu\n",
"nZWV4sD6ZpCJ2cTzpbvndCn0Vnq90siO5vppBTXBBWl/jHrWvNTQwfz4h5OSc9fkfP9UH2DfCUhL\n",
"FNl3/nljsziOG9JCuOvgGdH7e46leLC+TUD3HrsACAKwcd0S5TMPrOkmBRkOHv7Lzly5QsNcNX+G\n",
"PpSAlF2o4/7xva+zCQIgns/x3Lt6/qCt3vaf/EXw9ZEqybLiXHNwx3z3i58Sdx+uTCQIAiGREAK/\n",
"Xnr2jqWzNa8+dqeCGkLQPZgX+eLwWdGXP51L0PRaAlmOCIIQW357qzL43Dallrbpb7uy1DoT3X8O\n",
"QRCIkMd2/23zva3z89NDCvyFujbWs9u+zLLaXWQAgOC2yRKFDr3JV4AuGjNvVALCplPwT/6wvPHj\n",
"soaEXRWNSRh2g1Za5C3shcy/dgCZG1Xg3+5DZ0XbvzqSCgBw28KZupuLc015skSHWMDFTv/SxHnr\n",
"84OydrWO+ch/fpK7+7XfNwi4/XPHX/zgG9lPlTUiBo3i/cO9KztXzJ1hwrxe5Oj5Ov6n+08lffbj\n",
"aWldm4q14+X/JfcLybwZaWa5QsOsqm3lYV4cGahWlJ2v4/tjOY9X1/NDCUjl1RZehjTeHjxD/eWj\n",
"71O+P3FJnJUstr36+F0dBRlSZ61cwfjTh9+l/XjqF7HZ5iC///xDbcH3MVrs6Oa3v8i82qLg0Chk\n",
"fMOqeT2Fmck2iYjndro8pFyZJLCmq2tT0Z/auivHZHOQn/jNcsUDt5boUBIJ/v1Tpeij7yqSn3jr\n",
"X7m7/vp4/cxMaT8z+4lLjZz/8/c9WS4PRpqTm2p+fP0y9YKCDFtnt556/EI978ufz0n8gjNhMwgA\n",
"AEoiwRNr8nvWFct6q+UaVpPayGzrNjMcLn992b4oWx6LPjaRpeQ4N5CFrpDh7kNG84aLEIaga/37\n",
"g8S7gZFlB3a+HXglw+7THY4vDldKvF4ceeLu5YqnNqzsFzd1y8KZJomI1/y7v+zMV2h66V/+dE70\n",
"9H2rAuccOnOV91NljYhCRvGdf3qksTArOdAxslISetKTRM6X/vFtZtW1Vv63ZRcE961eYAAAWL0g\n",
"37jn6HmJ1e4iV15tYS+dk9NPNTx3rTUQ5nOxoZ3ncLlJDFrf1gKd3XpqR7ee8fDamwJrrjNXmtnf\n",
"n7gkBgDY/tyDrakSoRsAoCgn1f7PF/6j5c4/vl948nJTXJtSqwpWt9776kji1RYFR8BluXf/5+ON\n",
"yQmCIXNo3vrsQGqvxU5ZvSBf/9S9fd/VUxtW9sgVGsax83XCz348lfDusw+0+495MC/yxqc/ylwe\n",
"jJSeJLL/44WH5ezrM15mstiVmSzWPFx6k7b02e0zdUYrNRoz76jTR6VCllsqTHcDwPhnrvFu1kPi\n",
"49Nmsex3UNEo5JD686ysFMeMtCRrXZuKvf/k5fin7l3ZQ0IQcHkw5L2vjqQAAKycN8MQLBx+bltU\n",
"aNpXccl0rkbO/+i7Culdy4p7aRQyMS8/3cZnMzxGq4NSdqGOFywgV5o6mXqTlbpiXp6+vLpB6HJj\n",
"pIqLDZzbb5oVSMs9WlXLAwBYNX9G4L0vfz4nBgDIShHb/MLhR5YocicIuK4eg5m25+h54UsbSwO/\n",
"j9vjK4gXx2Vh4YSjrk1Fv9qi4AAArF6QP6gfLSjIMB87XyesuNggsNidnRymL5r627JqgcZgpgEA\n",
"PH3faiU7xDqFTqUQpOuF88bdUTjpYEYy2OqZfX91Q/zV9v1Zg/6cnSPa4XS0BBxU5KFHrvkF6WYA\n",
"gB6DmdbYrqYDAFxt7mL2XP/h1y6eNeR20LctKjQAAOiMVmpDm4oBAEBCECgpzDQBAJy52tKvQHRZ\n",
"tS9I9Pe/Wd6dKOI5AQCOX6jvd87pK808AZflmZMrswMAuD0YUnlVzgcAWDlvRshBMD8jyQoA0KUx\n",
"9Mt9CYR4DNMxD56+IgAAoFHJ+Ip5MwYVqfavPTAvjnSo+8oZHTpzRQjgK3Q51PqkXzuiqBY55QsQ\n",
"9MN0Ih6MFfGRJ0wN+De72AjZ78gnqrkB82KYxaGQywro+Wq9iTojPclZf72zAwAMHLGDSUmICxxr\n",
"VeloRTmpdgCAlfNmGH+uvCbSGMy0a3Ilw6+3n7nSzEsQcF356UnOJUU5xm/KLviMBzjegZJIYHW4\n",
"SDUtCs7qBfkBh3C7Wkfzf45DZ66KTlxqjAMAgoQggFxf92iNvm3j9Mb+G2X6rxvOQdfZ3Vcu9eFX\n",
"/ycXQRACQRBAAALPCHxHOiPF/3kUGl9po1SJ0MFjM4ZU4wO/Q5iBaiiml4BMM7AIHFRsZt/6rFvv\n",
"K0nUptIFRuL4OM6Q+Sc8Vt+mqEpNbyCZbVlxroVGIeMuD0Y6WlXLn5kpdXTrTZSWLg3r7hVzewAA\n",
"Vi3IN31TdkFitjnJ52rk7MVF2daKiw1cD+YlLZ+bF1Cv2pS6QOdNlQgdDNrgraATRTwXAICQ17+t\n",
"fhVzuBlEpfV1dC6LgaUkCEI6Y/3P8FvVbA4XyWC2UQEAZmYmh10r+o1I0XjSYwIyjvhVLEqYkcvt\n",
"6TPfErjvtGChaVVqaUXZoTeP6bXYAr+fWNDXORk0Kl6cJzNX1sj5p39p4m357S3qY+fr/GsLIwDA\n",
"goJ0K4/N8JisDsrRqlr+4qJs68lLjTwqhYwvK84NrFs8QdXon75vlbogQxpxsCYWYQyUf4SXJQod\n",
"27Y82B7JvYPXE8N1fAyPTFBDMb3WIJGATJ26blgEKoa2t2+/Pr8FKFHEC6hOjR3qIQM8e/R912an\n",
"9N+i7ObiXCMAQItCw1JpeymnfmnisZk0bGFhlu16m2DhzOtrlSvNfJwgoKq2lTcnJ9UcbNXKShYH\n",
"7mu1jyyWzhuhgy5VInQCADicnojvT6dSCA7Tt6lQU2d32EzXSNsRihtHQOgZAKkvAczcB5D1DgC3\n",
"ZLJbFBRFOvQPE6xOZaf6OnlwZ2/u7BlSQJoVvmM0ChnPSZX0E5BbSmaa/Nse7D/5S9zlhg7ugvwM\n",
"U3BbVl6fTTS9Ftq/D1eKjBY7ZemcnH6F5jKk8S4SyXef2lbliFKuI12kpyfFOwEAunr0DJcHizhc\n",
"2q92tSm1TGygEzYI/wxFGe9o3tEzjqHishcB4lYAkCgA7CKAtFcB0LGpFRcNHqxPNRlqcWi2OdAz\n",
"V5vjAADm5qWZ4vm+6unz89Nt2SkJNgCAps6ekJ0SJwg4fLZGCABwz8p5PQNDMUR8NjYjLdEGALD7\n",
"8NlElwfrt7YA6FurAADs+K5cCgCwekFBv3OoFDKRIY23AwB8V35xRBsoRTpyF+fJrAAAZpuT/H3F\n",
"pYg3KVq7eJYeAMDlwUi1rcqQ1WO8OB6w00z9GYQyfMxOVLBmAdDT+r9HogII1/Z/jyqZsK2EgwXk\n",
"5OUmntHSf1crL47DXz76PsXl9vkKnrxnuTr4+KN3LlUDAPzS1Mn95tiFQVVRdnxbnqAxmGlsJg17\n",
"8p4VIYs3LJ3jK7tqtbvIZJRErAzybQD0rVX852SliG2JIt4go8BrT/ymg0JG8a4eA2Pr5weTQj3r\n",
"aksX40pzV7/ZLtJF+rLiXMuakgIdAMCO/1curWtTDersJqsDPXjmCh8PcgLfu3qBnkmnegEA3vhk\n",
"vyzU7FNe3RAYJf3+k5EwsYt0Rq4NjGVjf198iGzgge+zCqIugz9SggXk4Okr8ccv1AtuLs7pzUoW\n",
"O5xujHTyUiO/uauHBQDwxN3LFQsK+sdNrV08y9TRrVd89F158t92HZI1dfYwlszONlvsTrTiYgPv\n",
"aFWtSMRnu7c9+4B8KBPnLSUFxo++q0gGACjKTjWH6iDL5+YZK2t8fo7FRdkh6/gWZEgdT9+7SrH9\n",
"qyOpXx2pSmzq7GHeVJRliudzsHM1ck5VbStPb7JS581IM33yfx9t8V/nvW50iMSD/dffr++6Jlew\n",
"1ToT/bHXP8srXVKknZkptat0RurZK8282lYV24vjiDiO2+D3ebAZNPzdZx9o2bLtq6ymzh7WY69/\n",
"mn3nzXN0yQkCd0O7inHkXK2grk3FnpGWaH3uoVsHfceRMMECUjCq0I0hcbQA2OoBWDP63vPaAQxH\n",
"+5/HmTM+zw9BsIBsvn91Z0O7mnmsqk74c+W1wPsiPtv91IaVintWzgvpgNt0z4oemUTo/O9vj0u/\n",
"KTsv2XvsvAQAgEGjektmZhhff/LujgRB6CBHAICslARXsjjOaXe60Q2r5oVM9lpTUmD6265DRLJY\n",
"4Fy3pGjIaIiNdyzRshg0747vyqUXG9p5FxvaAyErSSK+87mHbu144JaSfgl1I0l1ZTNo+Cd/frTp\n",
"7S8OS8urG4R7j52X7D3mO0Yho/jNxTmGJ36zvHtGev8NcG6alWV9//mHmv9r96GUGrmS7ffIC7gs\n",
"T4Y03v7mprvl65bOjnqPQ4QgiOpoL46K7o+lYPxJAgBDV0mMprIiWQggvAMgbhWA9TKA9juf4Pin\n",
"5LhVWkh7pXNcP1sQap2Jctsz784CAHj/+YealhXnWowWOypXaGiaXjOlIEPqCOcEHIjZ5kCvNncx\n",
"4uO4WHZqgpMUYepvZ7eeKhXHudEw4OU8RgAAAbxJREFUXuR2lY6aliSKqC1eHAdFj4HaqtTSeGym\n",
"NytF7OKyQs9gLg+G4DiOkFGUCGfqHojZ5kDlCg2t12InZyTFO1MkgrDt92O02NH6djUjK0Xs9K/n\n",
"RsvECwjhRaDjT9ngbOGMqYCE86QzMm2Q849GIFEnrIxjV4+Bum7Le4UAAP/94sONi4uyJ2z2ijF2\n",
"TLyZF0EJSH5ZDqy5Q8YYjSmsmWbIeEM+kcIB0F/FGsnoGWNqMTmedDLPCykvt4HpRC/0fJYKXvPY\n",
"716FsjFIfLQLRHdMjCAOwB1kUYkJyPRlckNNeMuMwL3JBKbTPDAeE4G9jje6YtYIADPfDHGr9CBY\n",
"bQTS5G1yEzyDRGN/jzE1mPxYLIRCAH+FEfgrjOAxkMF2lQ32OjY4mlng6mKGLRWEUHGgp9mAmWMD\n",
"Zr4V2EU2oAjGZHE2WrCgGKZoPLgxpgaTLyDBUAQY8Jcbgb+8zyzntZHAo6cAZiAD7iYByvICyvEC\n",
"mesFlIsBMjWjZThMulfEZ7t1Ris1pmJNXybeivUrAicIOHulmV2YlRI2XyHG1CUmIDFihGFq6icx\n",
"YkwRYgISI0YYYgISI0YY/j+SFgT3yDrlYgAAAABJRU5ErkJggg==\n"
],
"text/plain": [
""
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"Image('python.png')"
]
}
],
"metadata": {},
"nbformat": 4,
"nbformat_minor": 0
}
nbclient-0.5.6/nbclient/tests/files/Interrupt.ipynb 0000664 0000000 0000000 00000002240 14143421273 0022420 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": false
},
"outputs": [
{
"ename": "KeyboardInterrupt",
"evalue": "",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0;32mwhile\u001b[0m \u001b[0;32mTrue\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;32mcontinue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;31mKeyboardInterrupt\u001b[0m: "
]
}
],
"source": [
"while True: continue"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"done\n"
]
}
],
"source": [
"print(\"done\")"
]
}
],
"metadata": {},
"nbformat": 4,
"nbformat_minor": 0
}
nbclient-0.5.6/nbclient/tests/files/JupyterWidgets.ipynb 0000664 0000000 0000000 00000004122 14143421273 0023416 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "f46f26da84b54255bccc3a69d7eb08de",
"version_major": 2,
"version_minor": 0
},
"text/plain": [
"Label(value='Hello World')"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"import ipywidgets\n",
"label = ipywidgets.Label('Hello World')\n",
"label"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"# it should also handle custom msg'es\n",
"label.send({'msg': 'Hello'})"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.4"
},
"widgets": {
"application/vnd.jupyter.widget-state+json": {
"state": {
"8273e8fe9d9941a4a63c062158e0a630": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "1.4.0",
"model_name": "DescriptionStyleModel",
"state": {
"description_width": ""
}
},
"a72770a4f541425f8fe85833a3dc2a8e": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "1.4.0",
"model_name": "LabelModel",
"state": {
"context_menu": null,
"layout": "IPY_MODEL_dec20f599109458ca607b1df5959469b",
"style": "IPY_MODEL_8273e8fe9d9941a4a63c062158e0a630",
"value": "Hello World"
}
},
"dec20f599109458ca607b1df5959469b": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.1.0",
"model_name": "LayoutModel",
"state": {}
}
},
"version_major": 2,
"version_minor": 0
}
}
},
"nbformat": 4,
"nbformat_minor": 2
}
nbclient-0.5.6/nbclient/tests/files/Other Comms.ipynb 0000664 0000000 0000000 00000002447 14143421273 0022555 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"ExecuteTime": {
"end_time": "2020-05-29T11:16:26.365338Z",
"start_time": "2020-05-29T11:16:26.362047Z"
}
},
"outputs": [],
"source": [
"from ipykernel.comm import Comm"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"ExecuteTime": {
"end_time": "2020-05-29T11:16:26.377700Z",
"start_time": "2020-05-29T11:16:26.371603Z"
}
},
"outputs": [],
"source": [
"comm = Comm('this-comm-tests-a-missing-handler', data={'id': 'foo'})"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"ExecuteTime": {
"end_time": "2020-05-29T11:16:26.584520Z",
"start_time": "2020-05-29T11:16:26.581213Z"
}
},
"outputs": [],
"source": [
"comm.send(data={'id': 'bar'})"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.3"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
nbclient-0.5.6/nbclient/tests/files/Output.ipynb 0000664 0000000 0000000 00000047561 14143421273 0021743 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "e152547dd69d46fcbcb602cf9f92e50b",
"version_major": 2,
"version_minor": 0
},
"text/plain": [
"Output()"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"import ipywidgets as widgets\n",
"from IPython.display import clear_output\n",
"output1 = widgets.Output()\n",
"output1"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"hi\n"
]
}
],
"source": [
"print(\"hi\")\n",
"with output1:\n",
" print(\"in output\")"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"with output1:\n",
" raise ValueError(\"trigger msg_type=error\")"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "44dc393cd7c6461a8c4901f85becfc0e",
"version_major": 2,
"version_minor": 0
},
"text/plain": [
"Output()"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"import ipywidgets as widgets\n",
"output2 = widgets.Output()\n",
"output2"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"hi2\n"
]
}
],
"source": [
"print(\"hi2\")\n",
"with output2:\n",
" print(\"in output2\")\n",
" clear_output(wait=True)"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "d6cd7a1de3494d2daff23c6d4ffe42ee",
"version_major": 2,
"version_minor": 0
},
"text/plain": [
"Output()"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"import ipywidgets as widgets\n",
"output3 = widgets.Output()\n",
"output3"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"hi3\n"
]
}
],
"source": [
"print(\"hi3\")\n",
"with output3:\n",
" print(\"hello\")\n",
" clear_output(wait=True)\n",
" print(\"world\")"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "10517a9d5b1d4ea386945642894dd898",
"version_major": 2,
"version_minor": 0
},
"text/plain": [
"Output()"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"import ipywidgets as widgets\n",
"output4 = widgets.Output()\n",
"output4"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"hi4\n"
]
}
],
"source": [
"print(\"hi4\")\n",
"with output4:\n",
" print(\"hello world\")\n",
" clear_output()"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "37f7ba6a9ecc4c19b519e718cd12aafe",
"version_major": 2,
"version_minor": 0
},
"text/plain": [
"Output()"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"import ipywidgets as widgets\n",
"output5 = widgets.Output()\n",
"output5"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [],
"source": [
"print(\"hi5\")\n",
"with output5:\n",
" display(\"hello world\") # this is not a stream but plain text\n",
"clear_output()"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "4fb0ee7e557440109c08547514f03c7b",
"version_major": 2,
"version_minor": 0
},
"text/plain": [
"Output()"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"import ipywidgets as widgets\n",
"output_outer = widgets.Output()\n",
"output_inner = widgets.Output()\n",
"output_inner"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "01ea355e26484c13b1caaaf6d29ac0f2",
"version_major": 2,
"version_minor": 0
},
"text/plain": [
"Output()"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"output_outer"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [],
"source": [
"with output_inner:\n",
" print('in inner')\n",
" with output_outer:\n",
" print('in outer')\n",
" print('also in inner')"
]
}
],
"metadata": {
"kernelspec": {
"language": "python"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.3"
},
"widgets": {
"application/vnd.jupyter.widget-state+json": {
"state": {
"01ea355e26484c13b1caaaf6d29ac0f2": {
"model_module": "@jupyter-widgets/output",
"model_module_version": "1.0.0",
"model_name": "OutputModel",
"state": {
"layout": "IPY_MODEL_7213e178683c4d0682b3c848a2452cf1",
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": "in outer\n"
}
]
}
},
"025929abe8a143a08ad23de9e99c610f": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {}
},
"03c04d8645a74c4dac2e08e2142122a6": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {}
},
"091f6e59c48442b1bdb13320b4f6605d": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {}
},
"10517a9d5b1d4ea386945642894dd898": {
"model_module": "@jupyter-widgets/output",
"model_module_version": "1.0.0",
"model_name": "OutputModel",
"state": {
"layout": "IPY_MODEL_2c67de94f62d4887866d22abca7f6f13"
}
},
"106de0ded502439c873de5449248b00c": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {}
},
"1b9529b98aaf40ccbbf38e178796be88": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {}
},
"22592f3cb7674cb79cc60def5e8bc060": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {}
},
"2468aac6020349139ee6236b5dde0310": {
"model_module": "@jupyter-widgets/output",
"model_module_version": "1.0.0",
"model_name": "OutputModel",
"state": {
"layout": "IPY_MODEL_d5e88b6a26114d6da0b7af215aa2c3bb"
}
},
"2955dc9c531c4c6b80086da240d0df13": {
"model_module": "@jupyter-widgets/output",
"model_module_version": "1.0.0",
"model_name": "OutputModel",
"state": {
"layout": "IPY_MODEL_1b9529b98aaf40ccbbf38e178796be88",
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": "world\n"
}
]
}
},
"2c67de94f62d4887866d22abca7f6f13": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {}
},
"37f7ba6a9ecc4c19b519e718cd12aafe": {
"model_module": "@jupyter-widgets/output",
"model_module_version": "1.0.0",
"model_name": "OutputModel",
"state": {
"layout": "IPY_MODEL_03c04d8645a74c4dac2e08e2142122a6",
"outputs": [
{
"data": {
"text/plain": "'hello world'"
},
"metadata": {},
"output_type": "display_data"
}
]
}
},
"3945ce528fbf40dc830767281892ea56": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {}
},
"3c6bb7a6fd4f4f8786d30ef7b2c7c050": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {}
},
"3e0e8f5d18fe4992b11e1d5c13faecdf": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {}
},
"44dc393cd7c6461a8c4901f85becfc0e": {
"model_module": "@jupyter-widgets/output",
"model_module_version": "1.0.0",
"model_name": "OutputModel",
"state": {
"layout": "IPY_MODEL_3c6bb7a6fd4f4f8786d30ef7b2c7c050",
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": "in output2\n"
}
]
}
},
"45823daa739447a6ba5393e45204ec8e": {
"model_module": "@jupyter-widgets/output",
"model_module_version": "1.0.0",
"model_name": "OutputModel",
"state": {
"layout": "IPY_MODEL_3e0e8f5d18fe4992b11e1d5c13faecdf",
"outputs": [
{
"data": {
"text/plain": "'hello world'"
},
"metadata": {},
"output_type": "display_data"
}
]
}
},
"4fa2d1a41bd64017a20e358526ad9cf3": {
"model_module": "@jupyter-widgets/output",
"model_module_version": "1.0.0",
"model_name": "OutputModel",
"state": {
"layout": "IPY_MODEL_6490daaa1d2e42a0aef909e7b8c8eff4",
"outputs": [
{
"data": {
"text/plain": "'hello world'"
},
"metadata": {},
"output_type": "display_data"
}
]
}
},
"4fb0ee7e557440109c08547514f03c7b": {
"model_module": "@jupyter-widgets/output",
"model_module_version": "1.0.0",
"model_name": "OutputModel",
"state": {
"layout": "IPY_MODEL_dbf140d66ba247b7847c0f5642b7f607",
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": "in inner\nalso in inner\n"
}
]
}
},
"55aff5c4b53f440a868919f042cf9c14": {
"model_module": "@jupyter-widgets/output",
"model_module_version": "1.0.0",
"model_name": "OutputModel",
"state": {
"layout": "IPY_MODEL_a14653416772496aabed04b4719268ef",
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": "in inner\nalso in inner\n"
}
]
}
},
"5747ce87279c44519b9df62799e25e6f": {
"model_module": "@jupyter-widgets/output",
"model_module_version": "1.0.0",
"model_name": "OutputModel",
"state": {
"layout": "IPY_MODEL_6ef78dc31eec422ab2afce4be129836f",
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": "in output2\n"
}
]
}
},
"6490daaa1d2e42a0aef909e7b8c8eff4": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {}
},
"6ef78dc31eec422ab2afce4be129836f": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {}
},
"7134e81fdb364a738c1e58b26ec0d008": {
"model_module": "@jupyter-widgets/output",
"model_module_version": "1.0.0",
"model_name": "OutputModel",
"state": {
"layout": "IPY_MODEL_025929abe8a143a08ad23de9e99c610f",
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": "in inner\nalso in inner\n"
}
]
}
},
"7213e178683c4d0682b3c848a2452cf1": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {}
},
"804b6628ca0a48dfbad930615626b1fb": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {}
},
"a14653416772496aabed04b4719268ef": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {}
},
"a32671b19b814cf5bd964c36368f9f79": {
"model_module": "@jupyter-widgets/output",
"model_module_version": "1.0.0",
"model_name": "OutputModel",
"state": {
"layout": "IPY_MODEL_c843c22ff72e4983984ca4d62ce68e2b",
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": "in outer\n"
}
]
}
},
"aaf673ac9c774aaba4f751db2f3dd6c5": {
"model_module": "@jupyter-widgets/output",
"model_module_version": "1.0.0",
"model_name": "OutputModel",
"state": {
"layout": "IPY_MODEL_106de0ded502439c873de5449248b00c",
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": "in output2\n"
}
]
}
},
"bc3d9af2591e4a52af73921f46d79efa": {
"model_module": "@jupyter-widgets/output",
"model_module_version": "1.0.0",
"model_name": "OutputModel",
"state": {
"layout": "IPY_MODEL_22592f3cb7674cb79cc60def5e8bc060"
}
},
"c843c22ff72e4983984ca4d62ce68e2b": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {}
},
"cc022dc8b5584570a04facf68f9bdf0b": {
"model_module": "@jupyter-widgets/output",
"model_module_version": "1.0.0",
"model_name": "OutputModel",
"state": {
"layout": "IPY_MODEL_3945ce528fbf40dc830767281892ea56",
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": "in outer\n"
}
]
}
},
"d0cb56db68f2485480da1b2a43ad3c02": {
"model_module": "@jupyter-widgets/output",
"model_module_version": "1.0.0",
"model_name": "OutputModel",
"state": {
"layout": "IPY_MODEL_df4468e2240a430599a01e731472c319",
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": "in output\n"
},
{
"ename": "ValueError",
"evalue": "trigger msg_type=error",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0moutput1\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mValueError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"trigger msg_type=error\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;31mValueError\u001b[0m: trigger msg_type=error"
]
}
]
}
},
"d314a6ef74d947f3a2149bdf9b8b57a3": {
"model_module": "@jupyter-widgets/output",
"model_module_version": "1.0.0",
"model_name": "OutputModel",
"state": {
"layout": "IPY_MODEL_804b6628ca0a48dfbad930615626b1fb",
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": "in output\n"
}
]
}
},
"d5e88b6a26114d6da0b7af215aa2c3bb": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {}
},
"d6cd7a1de3494d2daff23c6d4ffe42ee": {
"model_module": "@jupyter-widgets/output",
"model_module_version": "1.0.0",
"model_name": "OutputModel",
"state": {
"layout": "IPY_MODEL_091f6e59c48442b1bdb13320b4f6605d",
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": "world\n"
}
]
}
},
"dbf140d66ba247b7847c0f5642b7f607": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {}
},
"de7ba4c0eed941a3b52fa940387d1415": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {}
},
"df4468e2240a430599a01e731472c319": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {}
},
"e152547dd69d46fcbcb602cf9f92e50b": {
"model_module": "@jupyter-widgets/output",
"model_module_version": "1.0.0",
"model_name": "OutputModel",
"state": {
"layout": "IPY_MODEL_de7ba4c0eed941a3b52fa940387d1415",
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": "in output\n"
},
{
"ename": "ValueError",
"evalue": "trigger msg_type=error",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0moutput1\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mValueError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"trigger msg_type=error\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;31mValueError\u001b[0m: trigger msg_type=error"
]
}
]
}
},
"e27795e5a4f14450b8c9590cac51cb6b": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "1.2.0",
"model_name": "LayoutModel",
"state": {}
},
"e3e20af587534a9bb3fa413951ceb28d": {
"model_module": "@jupyter-widgets/output",
"model_module_version": "1.0.0",
"model_name": "OutputModel",
"state": {
"layout": "IPY_MODEL_e27795e5a4f14450b8c9590cac51cb6b",
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": "world\n"
}
]
}
}
},
"version_major": 2,
"version_minor": 0
}
}
},
"nbformat": 4,
"nbformat_minor": 2
}
nbclient-0.5.6/nbclient/tests/files/Parallel Execute A.ipynb 0000664 0000000 0000000 00000004665 14143421273 0023721 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Ensure notebooks can execute in parallel\n",
"\n",
"This notebook uses a file system based \"lock\" to assert that two instances of the notebook kernel will run in parallel. Each instance writes to a file in a temporary directory, and then tries to read the other file from\n",
"the temporary directory, so that running them in sequence will fail, but running them in parallel will succed.\n",
"\n",
"Two notebooks are launched, each which sets the `this_notebook` variable. One notebook is set to `this_notebook = 'A'` and the other `this_notebook = 'B'`."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import os.path\n",
"import tempfile\n",
"import time"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# the variable this_notebook is injectected in a cell above by the test framework.\n",
"this_notebook = 'A'\n",
"other_notebook = 'B'\n",
"directory = os.environ['NBEXECUTE_TEST_PARALLEL_TMPDIR']\n",
"with open(os.path.join(directory, 'test_file_{}.txt'.format(this_notebook)), 'w') as f:\n",
" f.write('Hello from {}'.format(this_notebook))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"start = time.time()\n",
"timeout = 5\n",
"end = start + timeout\n",
"target_file = os.path.join(directory, 'test_file_{}.txt'.format(other_notebook))\n",
"while time.time() < end:\n",
" time.sleep(0.1)\n",
" if os.path.exists(target_file):\n",
" with open(target_file, 'r') as f:\n",
" text = f.read()\n",
" if text == 'Hello from {}'.format(other_notebook):\n",
" break\n",
"else:\n",
" assert False, \"Timed out – didn't get a message from {}\".format(other_notebook)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.7"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
nbclient-0.5.6/nbclient/tests/files/Parallel Execute B.ipynb 0000664 0000000 0000000 00000004665 14143421273 0023722 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Ensure notebooks can execute in parallel\n",
"\n",
"This notebook uses a file system based \"lock\" to assert that two instances of the notebook kernel will run in parallel. Each instance writes to a file in a temporary directory, and then tries to read the other file from\n",
"the temporary directory, so that running them in sequence will fail, but running them in parallel will succed.\n",
"\n",
"Two notebooks are launched, each which sets the `this_notebook` variable. One notebook is set to `this_notebook = 'A'` and the other `this_notebook = 'B'`."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import os.path\n",
"import tempfile\n",
"import time"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# the variable this_notebook is injectected in a cell above by the test framework.\n",
"this_notebook = 'B'\n",
"other_notebook = 'A'\n",
"directory = os.environ['NBEXECUTE_TEST_PARALLEL_TMPDIR']\n",
"with open(os.path.join(directory, 'test_file_{}.txt'.format(this_notebook)), 'w') as f:\n",
" f.write('Hello from {}'.format(this_notebook))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"start = time.time()\n",
"timeout = 5\n",
"end = start + timeout\n",
"target_file = os.path.join(directory, 'test_file_{}.txt'.format(other_notebook))\n",
"while time.time() < end:\n",
" time.sleep(0.1)\n",
" if os.path.exists(target_file):\n",
" with open(target_file, 'r') as f:\n",
" text = f.read()\n",
" if text == 'Hello from {}'.format(other_notebook):\n",
" break\n",
"else:\n",
" assert False, \"Timed out – didn't get a message from {}\".format(other_notebook)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.7"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
nbclient-0.5.6/nbclient/tests/files/SVG.ipynb 0000664 0000000 0000000 00000001722 14143421273 0021067 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"from IPython.display import SVG"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": false
},
"outputs": [
{
"data": {
"image/svg+xml": [
""
],
"text/plain": [
""
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"SVG(data='''\n",
"''')"
]
}
],
"metadata": {},
"nbformat": 4,
"nbformat_minor": 0
}
nbclient-0.5.6/nbclient/tests/files/Skip Exceptions with Cell Tags.ipynb 0000664 0000000 0000000 00000002507 14143421273 0026115 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"tags": [
"raises-exception"
]
},
"outputs": [
{
"ename": "Exception",
"evalue": "message",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mException\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# üñîçø∂é\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mException\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"message\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;31mException\u001b[0m: message"
]
}
],
"source": [
"# üñîçø∂é\n",
"raise Exception(\"message\")"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"ok\n"
]
}
],
"source": [
"print('ok')"
]
}
],
"metadata": {},
"nbformat": 4,
"nbformat_minor": 1
}
nbclient-0.5.6/nbclient/tests/files/Skip Exceptions.ipynb 0000664 0000000 0000000 00000002515 14143421273 0023441 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": false
},
"outputs": [
{
"ename": "Exception",
"evalue": "message",
"output_type": "error",
"traceback": [
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[1;31mException\u001b[0m Traceback (most recent call last)",
"\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[1;31m# üñîçø∂é\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 2\u001b[1;33m \u001b[1;32mraise\u001b[0m \u001b[0mException\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m\"message\"\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[1;31mException\u001b[0m: message"
]
}
],
"source": [
"# üñîçø∂é\n",
"raise Exception(\"message\")"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"ok\n"
]
}
],
"source": [
"print('ok')"
]
}
],
"metadata": {},
"nbformat": 4,
"nbformat_minor": 0
}
nbclient-0.5.6/nbclient/tests/files/Skip Execution with Cell Tag.ipynb 0000664 0000000 0000000 00000000777 14143421273 0025563 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "code",
"execution_count": null,
"source": [
"print(\"a long running cell\")"
],
"outputs": [],
"metadata": {
"tags": [
"skip-execution"
]
}
},
{
"cell_type": "code",
"execution_count": 1,
"source": [
"print('ok')"
],
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"ok\n"
]
}
],
"metadata": {}
}
],
"metadata": {},
"nbformat": 4,
"nbformat_minor": 1
}
nbclient-0.5.6/nbclient/tests/files/Sleep1s.ipynb 0000664 0000000 0000000 00000002255 14143421273 0021746 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import time\n",
"import datetime"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"t0 = datetime.datetime.utcnow()\n",
"time.sleep(1)\n",
"t1 = datetime.datetime.utcnow()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"time_format = '%Y-%m-%dT%H:%M:%S.%fZ'\n",
"print(t0.strftime(time_format), end='')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"print(t1.strftime(time_format), end='')"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.1"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
nbclient-0.5.6/nbclient/tests/files/Unicode.ipynb 0000664 0000000 0000000 00000000531 14143421273 0022013 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\u2603\n"
]
}
],
"source": [
"print('\u2603')"
]
}
],
"metadata": {},
"nbformat": 4,
"nbformat_minor": 0
}
nbclient-0.5.6/nbclient/tests/files/UnicodePy3.ipynb 0000664 0000000 0000000 00000000676 14143421273 0022421 0 ustar 00root root 0000000 0000000 {
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\u2603\n"
]
}
],
"source": [
"print('\u2603')"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
}
},
"nbformat": 4,
"nbformat_minor": 0
}
nbclient-0.5.6/nbclient/tests/files/python.png 0000664 0000000 0000000 00000021066 14143421273 0021417 0 ustar 00root root 0000000 0000000 PNG
IHDR P \m: sBIT|d tEXtSoftware www.inkscape.org< IDATxyxU'OIMtoi)dw6·~:#^|μ.̫36ਠ((PZR( M,mf_