==> jupyter_server-1.13.1/.babelrc <==

{
  "presets": ["es2015"]
}

==> jupyter_server-1.13.1/.eslintignore <==

*.min.js
*components*
*node_modules*
*built*
*build*

==> jupyter_server-1.13.1/.eslintrc.json <==

{
  "parserOptions": {
    "ecmaVersion": 6,
    "sourceType": "module"
  },
  "rules": {
    "semi": 1,
    "no-cond-assign": 2,
    "no-debugger": 2,
    "comma-dangle": 0,
    "no-unreachable": 2
  }
}

==> jupyter_server-1.13.1/.git-blame-ignore-revs <==

# Initial pre-commit reformat
42fe3bb4188a1fbd1810674776e7855cd529b8fc

==> jupyter_server-1.13.1/.gitconfig <==

[blame]
	ignoreRevsFile = .git-blame-ignore-revs

==> jupyter_server-1.13.1/.github/workflows/check-release.yml <==

name: Check Release
on:
  push:
    branches: ["master"]
  pull_request:
    branches: ["*"]

jobs:
  check_release:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        group: [check_release, link_check]
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Base Setup
        uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
      - name: Install Dependencies
        run: |
          pip install -e .
      - name: Check Release
        if: ${{ matrix.group == 'check_release' }}
        uses: jupyter-server/jupyter_releaser/.github/actions/check-release@v1
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
      - name: Run Link Check
        if: ${{ matrix.group == 'link_check' }}
        uses: jupyter-server/jupyter_releaser/.github/actions/check-links@v1

==> jupyter_server-1.13.1/.github/workflows/codeql-analysis.yml <==

# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
# ******** NOTE ********

name: "CodeQL"

on:
  push:
    branches: ["master"]
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [master]
  schedule:
    - cron: "25 18 * * 4"

permissions:
  security-events: write

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        language: ["python"]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
        # Learn more...
        # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection

    steps:
      - name: Checkout repository
        uses: actions/checkout@v2

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v1
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.
          # queries: ./path/to/local/query, your-org/your-repo/queries@main
          queries: security-and-quality

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@v1

      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 https://git.io/JvXDl

      # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
      #    and modify them (or add more) to build your code if your project
      #    uses a compiled language

      #- run: |
      #   make bootstrap
      #   make release

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v1

==> jupyter_server-1.13.1/.github/workflows/downstream.yml <==

name: Test downstream projects

on:
  push:
    branches: "*"
  pull_request:
    branches: "*"

jobs:
  tests:
    runs-on: ubuntu-latest
    timeout-minutes: 15
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Base Setup
        uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
      - name: Test jupyterlab_server
        uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1
        with:
          package_name: jupyterlab_server
      - name: Test jupyterlab
        uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1
        with:
          package_name: jupyterlab
          test_command: "python -m jupyterlab.browser_check --no-browser-test"
      - name: Test nbclassic
        uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1
        with:
          package_name: nbclassic
      - name: Test run nbclassic
        run: |
          pip install nbclassic
          pip install --force-reinstall "."

          # Make sure we can start and kill the nbclassic server
          jupyter nbclassic --no-browser &
          TASK_PID=$!
          # Make sure the task is running
          ps -p $TASK_PID || exit 1
          sleep 5
          kill $TASK_PID
          wait $TASK_PID

==> jupyter_server-1.13.1/.github/workflows/enforce-label.yml <==

name: Enforce PR label

on:
  pull_request:
    types: [labeled, unlabeled, opened, edited, synchronize]

jobs:
  enforce-label:
    runs-on: ubuntu-latest
    steps:
      - name: enforce-triage-label
        uses: jupyterlab/maintainer-tools/.github/actions/enforce-label@v1

==> jupyter_server-1.13.1/.github/workflows/integration-tests.yml <==

name: Jupyter Server Integration Tests [Linux]

on:
  push:
    branches: "master"
  pull_request:
    branches: "*"

jobs:
  build:
    runs-on: ${{ matrix.os }}-latest
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu]
        python-version: ["3.6", "3.7", "3.8", "3.9", "3.10", "pypy3"]
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Base Setup
        uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
      - name: Install the Python dependencies
        run: |
          pip install -e ".[test]"
      - name: List installed packages
        run: |
          pip freeze
          pip check
      - name: Run the tests
        run: |
          pytest -vv --integration_tests=true jupyter_server

==> jupyter_server-1.13.1/.github/workflows/python-linux.yml <==

name: Jupyter Server Tests [Linux]

on:
  push:
    branches: "master"
  pull_request:
    branches: "*"

jobs:
  # Run "pre-commit run --all-files"
  pre-commit:
    runs-on: ubuntu-20.04
    timeout-minutes: 2
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: 3.8
      # ref: https://github.com/pre-commit/action
      - uses: pre-commit/action@v2.0.0
      - name: Help message if pre-commit fail
        if: ${{ failure() }}
        run: |
          echo "You can install pre-commit hooks to automatically run formatting"
          echo "on each commit with:"
          echo "    pre-commit install"
          echo "or you can run by hand on staged files with"
          echo "    pre-commit run"
          echo "or after-the-fact on already committed files with"
          echo "    pre-commit run --all-files"

  build:
    runs-on: ${{ matrix.os }}-latest
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu]
        python-version: ["3.6", "3.7", "3.8", "3.9", "3.10", "pypy3"]
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Base Setup
        uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
      - name: Install the Python dependencies
        run: |
          pip install -e ".[test]" codecov
      - name: List installed packages
        run: |
          pip freeze
          pip check
      - name: Run the tests
        if: ${{ matrix.python-version != 'pypy3' }}
        run: |
          pytest -vv jupyter_server --cov jupyter_server --cov-branch --cov-report term-missing:skip-covered
      - name: Run the tests on pypy
        if: ${{ matrix.python-version == 'pypy3' }}
        run: |
          pytest -vv jupyter_server
      - name: Install the Python dependencies for the examples
        run: |
          cd examples/simple && pip install -e .
      - name: Run the tests for the examples
        run: |
          pytest examples/simple
      - name: Coverage
        if: ${{ matrix.python-version != 'pypy3' }}
        run: |
          codecov
      - name: Test full install
        run: |
          python -m venv test_install
          ./test_install/bin/python -m pip install -U pip
          ./test_install/bin/python -m pip install ".[test]"
          pushd test_install
          ./bin/pytest --pyargs jupyter_server
          popd

==> jupyter_server-1.13.1/.github/workflows/python-macos.yml <==

name: Jupyter Server Tests [Mac OS]

on:
  push:
    branches: "master"
  pull_request:
    branches: "*"

jobs:
  build:
    runs-on: ${{ matrix.os }}-latest
    strategy:
      fail-fast: false
      matrix:
        os: [macos]
        python-version: ["3.6", "3.7", "3.8", "3.9", "3.10", "pypy-3.7"]
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Base Setup
        uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
      - name: Install the Python dependencies
        run: |
          pip install -e .[test] codecov
      - name: List installed packages
        run: |
          pip freeze
          pip check
      - name: Run the tests
        if: ${{ !startsWith( matrix.python-version, 'pypy' ) }}
        run: |
          pytest -vv jupyter_server --cov jupyter_server --cov-branch --cov-report term-missing:skip-covered
      - name: Run the tests on pypy
        if: ${{ startsWith( matrix.python-version, 'pypy' ) }}
        run: |
          pytest -vv jupyter_server
      - name: Install the Python dependencies for the examples
        run: |
          cd examples/simple && pip install -e .
      - name: Run the tests for the examples
        run: |
          pytest examples/simple
      - name: Coverage
        if: ${{ !startsWith( matrix.python-version, 'pypy' ) }}
        run: |
          codecov

==> jupyter_server-1.13.1/.github/workflows/python-windows.yml <==

name: Jupyter Server Tests [Windows]

on:
  push:
    branches: "master"
  pull_request:
    branches: "*"

jobs:
  build:
    runs-on: ${{ matrix.os }}-latest
    strategy:
      fail-fast: false
      matrix:
        os: [windows]
        python-version: ["3.6", "3.7", "3.8", "3.9"]
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Base Setup
        uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
      - name: Install the Python dependencies
        run: |
          pip install -e .[test]
      - name: List installed packages
        run: |
          pip freeze
          pip check
      - name: Run the tests
        run: |
          # Disable capturing (-s) output from Pytest on Windows.
          # For an unknown reason, capturing output interferes with
          # the file descriptors opened by the asyncio IOLoop.
          # This leads to a nasty, flaky race condition that we haven't
          # been able to solve.
          pytest -vv -s jupyter_server
      - name: Install the Python dependencies for the examples
        run: |
          cd examples/simple && pip install -e .
      - name: Run the tests for the examples
        run: |
          pytest examples/simple

==> jupyter_server-1.13.1/.gitignore <==

MANIFEST
build
dist
_build
docs/man/*.gz
docs/source/api/generated
docs/source/config.rst
docs/gh-pages
jupyter_server/i18n/*/LC_MESSAGES/*.mo
jupyter_server/i18n/*/LC_MESSAGES/nbjs.json
jupyter_server/static/style/*.min.css*
node_modules
*.py[co]
__pycache__
*.egg-info
*~
*.bak
.ipynb_checkpoints
.tox
.DS_Store
\#*#
.#*
.coverage*
.pytest_cache
src
*.swp
*.map
Read the Docs
config.rst
/.project
/.pydevproject

# copied changelog file
docs/source/other/changelog.md

# jetbrains ide stuff
*.iml
.idea/

# vscode ide stuff
*.code-workspace
.history
.vscode/*
!.vscode/*.template

==> jupyter_server-1.13.1/.gitmodules <==

(empty file)

==> jupyter_server-1.13.1/.mailmap <==

(Note: the archive extraction dropped the email addresses from this file; the
name-to-alias mappings below are reconstructed one per line, with literal "<>"
kept where the original had it.)

A. J. Holyoake ajholyoake
Aaron Culich Aaron Culich
Aron Ahmadia ahmadia
Benjamin Ragan-Kelley Benjamin Ragan-Kelley
Benjamin Ragan-Kelley Min RK
Benjamin Ragan-Kelley MinRK
Barry Wark Barry Wark
Ben Edwards Ben Edwards
Bradley M. Froehle Bradley M. Froehle
Bradley M. Froehle Bradley Froehle
Brandon Parsons Brandon Parsons
Brian E. Granger Brian Granger
Brian E. Granger Brian Granger <>
Brian E. Granger bgranger <>
Brian E. Granger bgranger
Christoph Gohlke cgohlke
Cyrille Rossant rossant
Damián Avila damianavila
Damián Avila damianavila
Damon Allen damontallen
Darren Dale darren.dale <>
Darren Dale Darren Dale <>
Dav Clark Dav Clark <>
Dav Clark Dav Clark
David Hirschfeld dhirschfeld
David P. Sanders David P. Sanders
David Warde-Farley David Warde-Farley <>
Doug Blank Doug Blank
Eugene Van den Bulke Eugene Van den Bulke
Evan Patterson Evan Patterson
Evan Patterson Evan Patterson
Evan Patterson epatters
Evan Patterson epatters
Ernie French Ernie French
Ernie French ernie french
Ernie French ernop
Fernando Perez Fernando Perez
Fernando Perez fperez <>
Fernando Perez fptest <>
Fernando Perez fptest1 <>
Fernando Perez Fernando Perez
Fernando Perez Fernando Perez <>
Fernando Perez Fernando Perez
Frank Murphy Frank Murphy
Gabriel Becker gmbecker
Gael Varoquaux gael.varoquaux <>
Gael Varoquaux gvaroquaux
Gael Varoquaux Gael Varoquaux <>
Ingolf Becker watercrossing
Jake Vanderplas Jake Vanderplas
Jakob Gager jakobgager
Jakob Gager jakobgager
Jakob Gager jakobgager
Jason Grout Jason Grout
Jason Gors jason gors
Jason Gors jgors
Jens Hedegaard Nielsen Jens Hedegaard Nielsen
Jens Hedegaard Nielsen Jens H Nielsen
Jens Hedegaard Nielsen Jens H. Nielsen
Jez Ng Jez Ng
Jonathan Frederic Jonathan Frederic
Jonathan Frederic Jonathan Frederic
Jonathan Frederic Jonathan Frederic
Jonathan Frederic jon
Jonathan Frederic U-Jon-PC\Jon
Jonathan March Jonathan March
Jonathan March jdmarch
Jörgen Stenarson Jörgen Stenarson
Jörgen Stenarson Jorgen Stenarson
Jörgen Stenarson Jorgen Stenarson <>
Jörgen Stenarson jstenar
Jörgen Stenarson jstenar <>
Jörgen Stenarson Jörgen Stenarson
Juergen Hasch juhasch
Juergen Hasch juhasch
Julia Evans Julia Evans
Kester Tong KesterTong
Kyle Kelley Kyle Kelley
Kyle Kelley rgbkrk
Laurent Dufréchou Laurent Dufréchou
Laurent Dufréchou laurent dufrechou <>
Laurent Dufréchou laurent.dufrechou <>
Laurent Dufréchou Laurent Dufrechou <>
Laurent Dufréchou laurent.dufrechou@gmail.com <>
Laurent Dufréchou ldufrechou
Lorena Pantano Lorena
Luis Pedro Coelho Luis Pedro Coelho
Marc Molla marcmolla
Martín Gaitán Martín Gaitán
Matthias Bussonnier Matthias BUSSONNIER
Matthias Bussonnier Bussonnier Matthias
Matthias Bussonnier Matthias BUSSONNIER
Matthias Bussonnier Matthias Bussonnier
Michael Droettboom Michael Droettboom
Nicholas Bollweg Nicholas Bollweg (Nick)
Nicolas Rougier
Nikolay Koldunov Nikolay Koldunov
Omar Andrés Zapata Mesa Omar Andres Zapata Mesa
Omar Andrés Zapata Mesa Omar Andres Zapata Mesa
Pankaj Pandey Pankaj Pandey
Pascal Schetelat pascal-schetelat
Paul Ivanov Paul Ivanov
Pauli Virtanen Pauli Virtanen <>
Pauli Virtanen Pauli Virtanen
Pierre Gerold Pierre Gerold
Pietro Berkes Pietro Berkes
Piti Ongmongkolkul piti118
Prabhu Ramachandran Prabhu Ramachandran <>
Puneeth Chaganti Puneeth Chaganti
Robert Kern rkern <>
Robert Kern Robert Kern
Robert Kern Robert Kern
Robert Kern Robert Kern <>
Robert Marchman Robert Marchman
Satrajit Ghosh Satrajit Ghosh
Satrajit Ghosh Satrajit Ghosh
Scott Sanderson Scott Sanderson
smithj1 smithj1
smithj1 smithj1
Steven Johnson stevenJohnson
Steven Silvester blink1073
S. Weber s8weber
Stefan van der Walt Stefan van der Walt
Silvia Vinyes Silvia
Silvia Vinyes silviav12
Sylvain Corlay Sylvain Corlay
Sylvain Corlay sylvain.corlay
Ted Drain TD22057
Théophile Studer Théophile Studer
Thomas Kluyver Thomas
Thomas Spura Thomas Spura
Timo Paulssen timo
vds vds2212
vds vds
Ville M. Vainio Ville M. Vainio
Ville M. Vainio ville
Ville M. Vainio ville
Ville M. Vainio vivainio <>
Ville M. Vainio Ville M. Vainio
Ville M. Vainio Ville M. Vainio
Walter Doerwald walter.doerwald <>
Walter Doerwald Walter Doerwald <>
W. Trevor King W. Trevor King
Yoval P. y-p
==> jupyter_server-1.13.1/.pre-commit-config.yaml <==

repos:
  - repo: https://github.com/asottile/reorder_python_imports
    rev: v1.9.0
    hooks:
      - id: reorder-python-imports
  - repo: https://github.com/psf/black
    rev: 20.8b1
    hooks:
      - id: black
        args: ["--line-length", "100"]
  - repo: https://github.com/pre-commit/mirrors-prettier
    rev: v2.2.1
    hooks:
      - id: prettier
  - repo: https://gitlab.com/pycqa/flake8
    rev: "3.8.4"
    hooks:
      - id: flake8
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v3.4.0
    hooks:
      - id: end-of-file-fixer
      - id: check-case-conflict
      - id: check-executables-have-shebangs
      - id: requirements-txt-fixer
  - repo: https://github.com/pre-commit/mirrors-eslint
    rev: v7.32.0
    hooks:
      - id: eslint
  - repo: https://github.com/pre-commit/mirrors-pylint
    rev: v3.0.0a3
    hooks:
      - id: pylint
        args: [--disable=all, --enable=unused-import]

==> jupyter_server-1.13.1/.prettierignore <==

**/templates/*.html

==> jupyter_server-1.13.1/CHANGELOG.md <==

# Changelog

All notable changes to this project will be documented in this file.

## 1.13.1

([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v1.13.0...affd5d9a2e6d718baa2185518256f51921fd4484))

### Bugs fixed

- nudge both the shell and control channels [#636](https://github.com/jupyter-server/jupyter_server/pull/636) ([@Zsailer](https://github.com/Zsailer))

### Maintenance and upkeep improvements

- Fix macos pypy check [#632](https://github.com/jupyter-server/jupyter_server/pull/632) ([@blink1073](https://github.com/blink1073))

### Contributors to this release

([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2021-12-06&to=2021-12-09&type=c))

[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2021-12-06..2021-12-09&type=Issues) | [@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2021-12-06..2021-12-09&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2021-12-06..2021-12-09&type=Issues)

## 1.13.0

([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v1.12.1...b51969f16f04375d52cb029d72f90174141c760d))

### Enhancements made

- Persistent session storage [#614](https://github.com/jupyter-server/jupyter_server/pull/614) ([@Zsailer](https://github.com/Zsailer)) (see the sketch after this section)

### Bugs fixed

- Nudge on the control channel instead of the shell [#628](https://github.com/jupyter-server/jupyter_server/pull/628) ([@JohanMabille](https://github.com/JohanMabille))

### Maintenance and upkeep improvements

- Clean up downstream tests [#629](https://github.com/jupyter-server/jupyter_server/pull/629) ([@blink1073](https://github.com/blink1073))
- Clean up version info handling [#620](https://github.com/jupyter-server/jupyter_server/pull/620) ([@blink1073](https://github.com/blink1073))

### Contributors to this release

([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2021-11-26&to=2021-12-06&type=c))

[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2021-11-26..2021-12-06&type=Issues) | [@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2021-11-26..2021-12-06&type=Issues) | [@echarles](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aecharles+updated%3A2021-11-26..2021-12-06&type=Issues) | [@JohanMabille](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AJohanMabille+updated%3A2021-11-26..2021-12-06&type=Issues) | [@jtpio](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ajtpio+updated%3A2021-11-26..2021-12-06&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2021-11-26..2021-12-06&type=Issues)
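The "Persistent session storage" entry above (#614) replaces the purely in-memory session list with one that can be persisted to disk. As a minimal sketch, assuming the PR landed as a `SessionManager.database_filepath` trait (an assumption based on the PR, not verified here; check `jupyter server --help-all` for the exact name), opting in from `jupyter_server_config.py` might look like:

```python
# jupyter_server_config.py -- sketch only. The trait name below is my reading
# of PR #614 and the path is a placeholder; `c` is injected by Jupyter's
# config loader. Pointing the session database at a file (instead of the
# default in-memory sqlite database) lets sessions survive a server restart.
c.SessionManager.database_filepath = "/var/lib/jupyter/sessions.db"
```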
## 1.12.1

([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v1.12.0...ead83374b3b874bdf4ea47fca5aee1ecb5940a85))

### Bugs fixed

- Await `_finish_kernel_start` [#617](https://github.com/jupyter-server/jupyter_server/pull/617) ([@jtpio](https://github.com/jtpio))

### Maintenance and upkeep improvements

- Update to Python 3.10 in the CI workflows [#618](https://github.com/jupyter-server/jupyter_server/pull/618) ([@jtpio](https://github.com/jtpio))
- Use `maintainer-tools` base setup action [#616](https://github.com/jupyter-server/jupyter_server/pull/616) ([@blink1073](https://github.com/blink1073))

### Contributors to this release

([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2021-11-23&to=2021-11-26&type=c))

[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2021-11-23..2021-11-26&type=Issues) | [@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2021-11-23..2021-11-26&type=Issues) | [@jtpio](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ajtpio+updated%3A2021-11-23..2021-11-26&type=Issues)

## 1.12.0

([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v1.11.2...758dba6f8873f60c1ca41057b4be108da5a6ff1a))

### Enhancements made

- Consistent logging method [#607](https://github.com/jupyter-server/jupyter_server/pull/607) ([@mwakaba2](https://github.com/mwakaba2))
- Use pending kernels [#593](https://github.com/jupyter-server/jupyter_server/pull/593) ([@blink1073](https://github.com/blink1073))

### Bugs fixed

- Set `xsrf` cookie on base url [#612](https://github.com/jupyter-server/jupyter_server/pull/612) ([@minrk](https://github.com/minrk))
- Update `jpserver_extensions` trait to work with `traitlets` 5.x [#610](https://github.com/jupyter-server/jupyter_server/pull/610) ([@Zsailer](https://github.com/Zsailer))
- Fix `allow_origin_pat` property to properly parse regex [#603](https://github.com/jupyter-server/jupyter_server/pull/603) ([@havok2063](https://github.com/havok2063))

### Maintenance and upkeep improvements

- Enforce labels on PRs [#613](https://github.com/jupyter-server/jupyter_server/pull/613) ([@blink1073](https://github.com/blink1073))
- Normalize file name and path in `test_api` [#608](https://github.com/jupyter-server/jupyter_server/pull/608) ([@toonn](https://github.com/toonn))

### Contributors to this release

([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2021-11-01&to=2021-11-23&type=c))

[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2021-11-01..2021-11-23&type=Issues) | [@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2021-11-01..2021-11-23&type=Issues) | [@havok2063](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ahavok2063+updated%3A2021-11-01..2021-11-23&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2021-11-01..2021-11-23&type=Issues) | [@mwakaba2](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Amwakaba2+updated%3A2021-11-01..2021-11-23&type=Issues) | [@toonn](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Atoonn+updated%3A2021-11-01..2021-11-23&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2021-11-01..2021-11-23&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2021-11-01..2021-11-23&type=Issues)
## 1.11.2

([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v1.11.1...fda4cc5a96703bb4e871a5a622ef6031c7f6385b))

### Bugs fixed

- Fix \s deprecation warning [#600](https://github.com/jupyter-server/jupyter_server/pull/600) ([@Zsailer](https://github.com/Zsailer))
- Remove requests-unixsocket dependency [#599](https://github.com/jupyter-server/jupyter_server/pull/599) ([@kevin-bates](https://github.com/kevin-bates))
- bugfix: dir_exists is never awaited [#597](https://github.com/jupyter-server/jupyter_server/pull/597) ([@stdll00](https://github.com/stdll00))
- Fix missing await when call 'async_replace_file' [#595](https://github.com/jupyter-server/jupyter_server/pull/595) ([@Wh1isper](https://github.com/Wh1isper))
- add a pytest fixture for capturing logging stream [#588](https://github.com/jupyter-server/jupyter_server/pull/588) ([@Zsailer](https://github.com/Zsailer))

### Maintenance and upkeep improvements

- Avoid dependency on NBConvert versions for REST API test [#601](https://github.com/jupyter-server/jupyter_server/pull/601) ([@Zsailer](https://github.com/Zsailer))
- Bump ansi-regex from 5.0.0 to 5.0.1 [#590](https://github.com/jupyter-server/jupyter_server/pull/590) ([@dependabot](https://github.com/dependabot))

### Contributors to this release

([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2021-10-04&to=2021-11-01&type=c))

[@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2021-10-04..2021-11-01&type=Issues) | [@dependabot](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adependabot+updated%3A2021-10-04..2021-11-01&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2021-10-04..2021-11-01&type=Issues) | [@stdll00](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Astdll00+updated%3A2021-10-04..2021-11-01&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2021-10-04..2021-11-01&type=Issues) | [@Wh1isper](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AWh1isper+updated%3A2021-10-04..2021-11-01&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2021-10-04..2021-11-01&type=Issues)
## 1.11.1

([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v1.11.0...f4c3889658c1daad1d8966438d1f1b98b3f60641))

### Bugs fixed

- Do not log connection error if the kernel is already shutdown [#584](https://github.com/jupyter-server/jupyter_server/pull/584) ([@martinRenou](https://github.com/martinRenou))
- [BUG]: allow None for min_open_files_limit trait [#587](https://github.com/jupyter-server/jupyter_server/pull/587) ([@Zsailer](https://github.com/Zsailer))

### Contributors to this release

([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2021-09-09&to=2021-10-04&type=c))

[@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2021-09-09..2021-10-04&type=Issues) | [@martinRenou](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AmartinRenou+updated%3A2021-09-09..2021-10-04&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2021-09-09..2021-10-04&type=Issues)

## 1.11.0

([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v1.10.2...1863fde11af7971d040ad50ad015caa83b6c7d54))

### Enhancements made

- Allow non-empty directory deletion through settings [#574](https://github.com/jupyter-server/jupyter_server/pull/574) ([@fcollonval](https://github.com/fcollonval))

### Bugs fixed

- pytest_plugin: allow user specified headers in jp_ws_fetch [#580](https://github.com/jupyter-server/jupyter_server/pull/580) ([@oliver-sanders](https://github.com/oliver-sanders)) (see the sketch after this section)
- Shutdown kernels/terminals on api/shutdown [#579](https://github.com/jupyter-server/jupyter_server/pull/579) ([@martinRenou](https://github.com/martinRenou))
- pytest: package conftest [#576](https://github.com/jupyter-server/jupyter_server/pull/576) ([@oliver-sanders](https://github.com/oliver-sanders))
- Set stacklevel on warning to point to the right place. [#572](https://github.com/jupyter-server/jupyter_server/pull/572) ([@Carreau](https://github.com/Carreau))
- Respect reraise setting [#571](https://github.com/jupyter-server/jupyter_server/pull/571) ([@vidartf](https://github.com/vidartf))

### Maintenance and upkeep improvements

- Fix jupyter_client warning [#581](https://github.com/jupyter-server/jupyter_server/pull/581) ([@martinRenou](https://github.com/martinRenou))
- Add Pre-Commit Config [#575](https://github.com/jupyter-server/jupyter_server/pull/575) ([@fcollonval](https://github.com/fcollonval))
- Clean up link checking [#569](https://github.com/jupyter-server/jupyter_server/pull/569) ([@blink1073](https://github.com/blink1073))

### Contributors to this release

([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2021-08-02&to=2021-09-09&type=c))

[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2021-08-02..2021-09-09&type=Issues) | [@Carreau](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3ACarreau+updated%3A2021-08-02..2021-09-09&type=Issues) | [@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2021-08-02..2021-09-09&type=Issues) | [@fcollonval](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Afcollonval+updated%3A2021-08-02..2021-09-09&type=Issues) | [@martinRenou](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AmartinRenou+updated%3A2021-08-02..2021-09-09&type=Issues) | [@oliver-sanders](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aoliver-sanders+updated%3A2021-08-02..2021-09-09&type=Issues) | [@vidartf](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Avidartf+updated%3A2021-08-02..2021-09-09&type=Issues)
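To illustrate the `jp_ws_fetch` change above (#580): a minimal sketch of a test that opens a kernel websocket with a caller-supplied header. `jp_fetch` and `jp_ws_fetch` are the fixtures shipped by the pytest plugin; the header name and overall test body are illustrative assumptions, not code from this repository.

```python
import json


async def test_ws_with_custom_headers(jp_fetch, jp_ws_fetch):
    # Start a kernel through the REST API first.
    response = await jp_fetch("api", "kernels", method="POST", body=json.dumps({}))
    kernel_id = json.loads(response.body)["id"]

    # Open the kernel's websocket, forwarding a user-specified header (#580).
    ws = await jp_ws_fetch(
        "api",
        "kernels",
        kernel_id,
        "channels",
        headers={"X-Custom-Header": "hello"},  # illustrative header
    )
    ws.close()
```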
## 1.10.2

([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v1.10.1...7956dc51d8239b7b9e8de3b22ceb4473bbf1d4e5))

### Bugs fixed

- fix: make command line aliases work again [#564](https://github.com/jupyter-server/jupyter_server/pull/564) ([@mariobuikhuizen](https://github.com/mariobuikhuizen))
- decode bytes from secure cookie [#562](https://github.com/jupyter-server/jupyter_server/pull/562) ([@oliver-sanders](https://github.com/oliver-sanders))

### Maintenance and upkeep improvements

- Add the needed space in the welcome message [#561](https://github.com/jupyter-server/jupyter_server/pull/561) ([@echarles](https://github.com/echarles))
- Update check-release workflow [#558](https://github.com/jupyter-server/jupyter_server/pull/558) ([@afshin](https://github.com/afshin))

### Documentation improvements

- Fix typo in allow_password_change help [#559](https://github.com/jupyter-server/jupyter_server/pull/559) ([@manics](https://github.com/manics))

### Contributors to this release

([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2021-07-23&to=2021-08-02&type=c))

[@afshin](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aafshin+updated%3A2021-07-23..2021-08-02&type=Issues) | [@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2021-07-23..2021-08-02&type=Issues) | [@echarles](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aecharles+updated%3A2021-07-23..2021-08-02&type=Issues) | [@manics](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Amanics+updated%3A2021-07-23..2021-08-02&type=Issues) | [@mariobuikhuizen](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Amariobuikhuizen+updated%3A2021-07-23..2021-08-02&type=Issues) | [@oliver-sanders](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aoliver-sanders+updated%3A2021-07-23..2021-08-02&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2021-07-23..2021-08-02&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2021-07-23..2021-08-02&type=Issues)

## 1.10.1

([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v1.10.0...42a195665aa8ae218fce4ec8165f19e734a9edaf))

### Bugs fixed

- Protect against unset spec [#556](https://github.com/jupyter-server/jupyter_server/pull/556) ([@fcollonval](https://github.com/fcollonval))

### Contributors to this release

([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2021-07-22&to=2021-07-23&type=c))

[@fcollonval](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Afcollonval+updated%3A2021-07-22..2021-07-23&type=Issues)

## 1.10.0

([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v1.9.0...c9ee2a45e9a8f04215c2f3901f90cdc7b8fdc9c6))

### Enhancements made

- PR: Add a new preferred-dir traitlet [#549](https://github.com/jupyter-server/jupyter_server/pull/549) ([@goanpeca](https://github.com/goanpeca)) (see the sketch after this section)
- stop hook for extensions [#526](https://github.com/jupyter-server/jupyter_server/pull/526) ([@oliver-sanders](https://github.com/oliver-sanders))
- extensions: allow extensions in namespace packages [#523](https://github.com/jupyter-server/jupyter_server/pull/523) ([@oliver-sanders](https://github.com/oliver-sanders))

### Bugs fixed

- Fix examples/simple test execution [#552](https://github.com/jupyter-server/jupyter_server/pull/552) ([@davidbrochart](https://github.com/davidbrochart))
- Rebuild package-lock, fixing local setup [#548](https://github.com/jupyter-server/jupyter_server/pull/548) ([@martinRenou](https://github.com/martinRenou))

### Maintenance and upkeep improvements

- small test changes [#541](https://github.com/jupyter-server/jupyter_server/pull/541) ([@oliver-sanders](https://github.com/oliver-sanders))

### Contributors to this release

([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2021-06-24&to=2021-07-21&type=c))

[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2021-06-24..2021-07-21&type=Issues) | [@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2021-06-24..2021-07-21&type=Issues) | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adavidbrochart+updated%3A2021-06-24..2021-07-21&type=Issues) | [@goanpeca](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Agoanpeca+updated%3A2021-06-24..2021-07-21&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2021-06-24..2021-07-21&type=Issues) | [@martinRenou](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AmartinRenou+updated%3A2021-06-24..2021-07-21&type=Issues) | [@oliver-sanders](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aoliver-sanders+updated%3A2021-06-24..2021-07-21&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2021-06-24..2021-07-21&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2021-06-24..2021-07-21&type=Issues)
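As a usage note for the preferred-dir traitlet above (#549): a minimal `jupyter_server_config.py` sketch. The paths are placeholders; as I read the PR, `preferred_dir` must live under `root_dir`, and frontends use it as the directory to open first.

```python
# Sketch only: both paths are placeholders.
c.ServerApp.root_dir = "/home/me"               # contents are served from here
c.ServerApp.preferred_dir = "/home/me/project"  # UIs are steered to start here
```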
## 1.9.0

([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v1.8.0...f712734c4f7005f6a844abec9f57b993e7b004b0))

### Enhancements made

- enable a way to run a task when an io_loop is created [#531](https://github.com/jupyter-server/jupyter_server/pull/531) ([@eastonsuo](https://github.com/eastonsuo))
- adds `GatewayClient.auth_scheme` configurable [#529](https://github.com/jupyter-server/jupyter_server/pull/529) ([@telamonian](https://github.com/telamonian)) (see the sketch after this section)
- [Notebook port 4835] Add UNIX socket support to notebook server [#525](https://github.com/jupyter-server/jupyter_server/pull/525) ([@jtpio](https://github.com/jtpio))

### Bugs fixed

- Fix nbconvert handler [#545](https://github.com/jupyter-server/jupyter_server/pull/545) ([@davidbrochart](https://github.com/davidbrochart))
- Fixes AsyncContentsManager#exists [#542](https://github.com/jupyter-server/jupyter_server/pull/542) ([@icankeep](https://github.com/icankeep))

### Maintenance and upkeep improvements

- argon2 as an optional dependency [#532](https://github.com/jupyter-server/jupyter_server/pull/532) ([@vidartf](https://github.com/vidartf))
- Test Downstream Packages [#528](https://github.com/jupyter-server/jupyter_server/pull/528) ([@blink1073](https://github.com/blink1073))
- fix jp_ws_fetch not work by its own #441 [#527](https://github.com/jupyter-server/jupyter_server/pull/527) ([@eastonsuo](https://github.com/eastonsuo))

### Documentation improvements

- Update link to meeting notes [#535](https://github.com/jupyter-server/jupyter_server/pull/535) ([@krassowski](https://github.com/krassowski))

### Contributors to this release

([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2021-05-20&to=2021-06-24&type=c))

[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2021-05-20..2021-06-24&type=Issues) | [@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2021-05-20..2021-06-24&type=Issues) | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adavidbrochart+updated%3A2021-05-20..2021-06-24&type=Issues) | [@eastonsuo](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aeastonsuo+updated%3A2021-05-20..2021-06-24&type=Issues) | [@icankeep](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aicankeep+updated%3A2021-05-20..2021-06-24&type=Issues) | [@jtpio](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ajtpio+updated%3A2021-05-20..2021-06-24&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2021-05-20..2021-06-24&type=Issues) | [@krassowski](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akrassowski+updated%3A2021-05-20..2021-06-24&type=Issues) | [@telamonian](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Atelamonian+updated%3A2021-05-20..2021-06-24&type=Issues) | [@vidartf](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Avidartf+updated%3A2021-05-20..2021-06-24&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2021-05-20..2021-06-24&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2021-05-20..2021-06-24&type=Issues)
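To make the `GatewayClient.auth_scheme` entry above (#529) concrete: a minimal config sketch. The URL and token are placeholders, and the claim that the previously hard-coded scheme was `token` is my reading of the PR, not verified here.

```python
# Sketch only: URL and token are placeholders.
c.GatewayClient.url = "http://gateway.example.com:8888"
c.GatewayClient.auth_token = "my-gateway-token"
c.GatewayClient.auth_scheme = "Bearer"  # e.g. sends "Authorization: Bearer <token>"
```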
## 1.8.0

([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v1.7.0...b063117a3a48ea67371c62e492f4637e44157586))

### Enhancements made

- Expose a public property to sort extensions deterministically. [#522](https://github.com/jupyter-server/jupyter_server/pull/522) ([@Zsailer](https://github.com/Zsailer)) (see the sketch after this section)

### Bugs fixed

- init_httpserver at the end of initialize [#517](https://github.com/jupyter-server/jupyter_server/pull/517) ([@minrk](https://github.com/minrk))

### Maintenance and upkeep improvements

- Upgrade anyio to 3.1 for all py versions [#521](https://github.com/jupyter-server/jupyter_server/pull/521) ([@mwakaba2](https://github.com/mwakaba2))
- Enable Server Tests on Windows [#519](https://github.com/jupyter-server/jupyter_server/pull/519) ([@jtpio](https://github.com/jtpio))
- restore preference for SelectorEventLoop on Windows [#513](https://github.com/jupyter-server/jupyter_server/pull/513) ([@minrk](https://github.com/minrk))
- set default config dir name [#504](https://github.com/jupyter-server/jupyter_server/pull/504) ([@minrk](https://github.com/minrk))

### Contributors to this release

([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2021-05-10&to=2021-05-20&type=c))

[@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2021-05-10..2021-05-20&type=Issues) | [@jtpio](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ajtpio+updated%3A2021-05-10..2021-05-20&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2021-05-10..2021-05-20&type=Issues) | [@mwakaba2](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Amwakaba2+updated%3A2021-05-10..2021-05-20&type=Issues) | [@vidartf](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Avidartf+updated%3A2021-05-10..2021-05-20&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2021-05-10..2021-05-20&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2021-05-10..2021-05-20&type=Issues)
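Regarding the deterministic extension ordering above (#522): a sketch of how the new public property might be inspected. The name `sorted_extensions` is inferred from the PR title and should be verified against `jupyter_server.extension.manager.ExtensionManager`.

```python
# Hypothetical inspection snippet; `serverapp` is a running ServerApp instance
# and `sorted_extensions` is an assumed property name (see lead-in above).
for name in serverapp.extension_manager.sorted_extensions:
    print(name)  # extension packages now come back in a stable order
```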
## 1.7.0

([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v1.7.0a2...afae85a7bb8c45f7610cd38b60d6075bb623490b))

### Bugs fixed

- Fix for recursive symlink - (port Notebook 4670) [#497](https://github.com/jupyter-server/jupyter_server/pull/497) ([@kevin-bates](https://github.com/kevin-bates))

### Enhancements made

- Make nbconvert root handler asynchronous [#512](https://github.com/jupyter-server/jupyter_server/pull/512) ([@hMED22](https://github.com/hMED22))
- Refactor gateway kernel management to achieve a degree of consistency [#483](https://github.com/jupyter-server/jupyter_server/pull/483) ([@kevin-bates](https://github.com/kevin-bates))

### Maintenance and upkeep improvements

- Remove Packaging Dependency [#515](https://github.com/jupyter-server/jupyter_server/pull/515) ([@jtpio](https://github.com/jtpio))
- Use kernel_id for new kernel if it doesn't exist in MappingKernelManager.start_kernel [#511](https://github.com/jupyter-server/jupyter_server/pull/511) ([@the-higgs](https://github.com/the-higgs))
- Include backtrace in debug output when extension fails to load [#506](https://github.com/jupyter-server/jupyter_server/pull/506) ([@candlerb](https://github.com/candlerb))
- ExtensionPoint: return True on successful validate() [#503](https://github.com/jupyter-server/jupyter_server/pull/503) ([@minrk](https://github.com/minrk))
- ExtensionManager: load default config manager by default [#502](https://github.com/jupyter-server/jupyter_server/pull/502) ([@minrk](https://github.com/minrk))
- Prep for Release Helper Usage [#494](https://github.com/jupyter-server/jupyter_server/pull/494) ([@jtpio](https://github.com/jtpio))
- Typo in shutdown with answer_yes [#491](https://github.com/jupyter-server/jupyter_server/pull/491) ([@kiendang](https://github.com/kiendang))
- Remove some of ipython_genutils no-op. [#440](https://github.com/jupyter-server/jupyter_server/pull/440) ([@Carreau](https://github.com/Carreau))
- Drop dependency on pywin32 [#514](https://github.com/jupyter-server/jupyter_server/pull/514) ([@kevin-bates](https://github.com/kevin-bates))
- Upgrade anyio to v3 [#492](https://github.com/jupyter-server/jupyter_server/pull/492) ([@mwakaba2](https://github.com/mwakaba2))
- Add Appropriate Token Permission for CodeQL Workflow [#489](https://github.com/jupyter-server/jupyter_server/pull/489) ([@afshin](https://github.com/afshin))

### Documentation improvements

- DOC: Autoreformat docstrings. [#493](https://github.com/jupyter-server/jupyter_server/pull/493) ([@Carreau](https://github.com/Carreau))
### Contributors to this release

([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2021-04-22&to=2021-05-10&type=c))

[@codecov-commenter](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-commenter+updated%3A2021-05-06..2021-05-10&type=Issues) | [@hMED22](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AhMED22+updated%3A2021-05-06..2021-05-10&type=Issues) | [@jtpio](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ajtpio+updated%3A2021-05-06..2021-05-10&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2021-05-06..2021-05-10&type=Issues) | [@the-higgs](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Athe-higgs+updated%3A2021-05-06..2021-05-10&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2021-05-06..2021-05-10&type=Issues)

[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2021-05-01..2021-05-05&type=Issues) | [@candlerb](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acandlerb+updated%3A2021-05-01..2021-05-05&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2021-05-01..2021-05-05&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2021-05-01..2021-05-05&type=Issues) | [@mwakaba2](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Amwakaba2+updated%3A2021-05-01..2021-05-05&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2021-05-01..2021-05-05&type=Issues) | [@kiendang](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akiendang+updated%3A2021-04-21..2021-05-01&type=Issues) | [@Carreau](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3ACarreau+updated%3A2021-04-21..2021-05-01&type=Issues)

## 1.6.4

([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v1.6.3...68a64ea13be5d0d86460f04e0c47eb0b6662a0af))

### Bugs fixed

- Fix loading of sibling extensions [#485](https://github.com/jupyter-server/jupyter_server/pull/485) ([@afshin](https://github.com/afshin))

### Contributors to this release

([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2021-04-21&to=2021-04-21&type=c))

[@afshin](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aafshin+updated%3A2021-04-21..2021-04-21&type=Issues)

## 1.6.3

([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v1.6.2...aa2636795ae1d87e3055febb3931f891dd6b4451))

### Merges

- Gate anyio version. [2b51ee3](https://github.com/jupyter-server/jupyter_server/commit/2b51ee37bdad305cb349e246c8ba94381cdb2048)
- Fix activity tracking and nudge issues when kernel ports change on restarts [#482](https://github.com/jupyter-server/jupyter_server/pull/482) ([@kevin-bates](https://github.com/kevin-bates))

### Contributors to this release

([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2021-04-16&to=2021-04-21&type=c))

[@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2021-04-16..2021-04-21&type=Issues)

## 1.6.2

### Enhancements made

- Tighten xsrf checks [#478](https://github.com/jupyter-server/jupyter_server/pull/478) ([@jtpio](https://github.com/jtpio))

### Bugs fixed

- Re-enable support for answer_yes flag [#479](https://github.com/jupyter-server/jupyter_server/pull/479) ([@jtpio](https://github.com/jtpio))

### Maintenance and upkeep improvements

- Use Jupyter Packaging [#477](https://github.com/jupyter-server/jupyter_server/pull/477) ([@jtpio](https://github.com/jtpio))

### Contributors to this release

([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2021-04-12&to=2021-04-16&type=c))

[@jtpio](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ajtpio+updated%3A2021-04-12..2021-04-16&type=Issues)

## 1.6.1

([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/1.6.0...2756a29c5fdcfa62a3492004627541089d53d14f))

### Merged PRs

- Fix race condition with async kernel management [#472](https://github.com/jupyter-server/jupyter_server/pull/472) ([@jtpio](https://github.com/jtpio))
- Fix kernel lookup [#475](https://github.com/jupyter-server/jupyter_server/pull/475) ([@davidbrochart](https://github.com/davidbrochart))
- Add Extension App Aliases to Server App [#473](https://github.com/jupyter-server/jupyter_server/pull/473) ([@jtpio](https://github.com/jtpio))
- Correct 'Content-Type' headers [#471](https://github.com/jupyter-server/jupyter_server/pull/471) ([@faucct](https://github.com/faucct))

### Contributors to this release

([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2021-04-08&to=2021-04-12&type=c))

[@codecov-io](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-io+updated%3A2021-04-08..2021-04-12&type=Issues) | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Adavidbrochart+updated%3A2021-04-08..2021-04-12&type=Issues) | [@echarles](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aecharles+updated%3A2021-04-08..2021-04-12&type=Issues) | [@faucct](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Afaucct+updated%3A2021-04-08..2021-04-12&type=Issues) | [@jtpio](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ajtpio+updated%3A2021-04-08..2021-04-12&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2021-04-08..2021-04-12&type=Issues)

## 1.6.0

([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/1.5.1...724c38ec08c15cf1ed3c2efb2ad5c11f684f2cda))

### New features added

- Add env variable support for port options [#461](https://github.com/jupyter-server/jupyter_server/pull/461) ([@afshin](https://github.com/afshin))
### Enhancements made

- Add support for JUPYTER_TOKEN_FILE [#462](https://github.com/jupyter-server/jupyter_server/pull/462) ([@afshin](https://github.com/afshin))

### Maintenance and upkeep improvements

- Remove unnecessary future imports [#464](https://github.com/jupyter-server/jupyter_server/pull/464) ([@afshin](https://github.com/afshin))

### Documentation improvements

- Add Changelog to Sphinx Docs [#465](https://github.com/jupyter-server/jupyter_server/pull/465) ([@afshin](https://github.com/afshin))
- Update description for kernel restarted in the API docs [#463](https://github.com/jupyter-server/jupyter_server/pull/463) ([@jtpio](https://github.com/jtpio))
- Delete the extra “or” that prevents easy cut-and-paste of URLs. [#460](https://github.com/jupyter-server/jupyter_server/pull/460) ([@jasongrout](https://github.com/jasongrout))
- Add descriptive log for port unavailable and port-retries=0 [#459](https://github.com/jupyter-server/jupyter_server/pull/459) ([@afshin](https://github.com/afshin))

### Other merged PRs

- Add ReadTheDocs config [#468](https://github.com/jupyter-server/jupyter_server/pull/468) ([@jtpio](https://github.com/jtpio))
- Update MappingKM.restart_kernel to accept now kwarg [#404](https://github.com/jupyter-server/jupyter_server/pull/404) ([@vidartf](https://github.com/vidartf))

### Contributors to this release

([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2021-03-24&to=2021-04-08&type=c))

[@afshin](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aafshin+updated%3A2021-03-24..2021-04-08&type=Issues) | [@codecov-io](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-io+updated%3A2021-03-24..2021-04-08&type=Issues) | [@echarles](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aecharles+updated%3A2021-03-24..2021-04-08&type=Issues) | [@jasongrout](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ajasongrout+updated%3A2021-03-24..2021-04-08&type=Issues) | [@jtpio](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ajtpio+updated%3A2021-03-24..2021-04-08&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2021-03-24..2021-04-08&type=Issues) | [@vidartf](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Avidartf+updated%3A2021-03-24..2021-04-08&type=Issues)

## 1.5.1

([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/1.5.0...c3303cde880ecd1103118b8c7f9e5ebc19f0d1ba))

**Merged pull requests:**

- Ensure jupyter config dir exists [#454](https://github.com/jupyter-server/jupyter_server/pull/454) ([@afshin](https://github.com/afshin))
- Allow `pre_save_hook` to cancel save with `HTTPError` [#456](https://github.com/jupyter-server/jupyter_server/pull/456) ([@minrk](https://github.com/minrk)) (see the sketch after this section)

**Contributors to this release:**

([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2021-03-23&to=2021-03-24&type=c))

[@afshin](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aafshin+updated%3A2021-03-23..2021-03-24&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2021-03-23..2021-03-24&type=Issues)
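To show what the `pre_save_hook` change above (#456) enables: a minimal `jupyter_server_config.py` sketch in which a hook vetoes a save by raising `HTTPError`. The path rule is invented for illustration.

```python
from tornado.web import HTTPError


def veto_readonly_saves(model, path, contents_manager, **kwargs):
    # Raising HTTPError from a pre_save_hook cancels the save (#456); the
    # status code and message are propagated back to the client.
    if path.startswith("readonly/"):  # invented rule, for illustration only
        raise HTTPError(403, "readonly/ is write-protected")


c.FileContentsManager.pre_save_hook = veto_readonly_saves
```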
## 1.5.0

([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/1.4.1...74801f479d7bb89c5afd4c020c52e614cc566da5))

**Merged pull requests:**

- Add Styling to the HTML Pages [#452](https://github.com/jupyter-server/jupyter_server/pull/452) ([@afshin](https://github.com/afshin))
- Implement password hashing with `argon2-cffi` [#450](https://github.com/jupyter-server/jupyter_server/pull/450) ([@afshin](https://github.com/afshin))
- Escape user input in handlers flagged during code scans [#449](https://github.com/jupyter-server/jupyter_server/pull/449) ([@kevin-bates](https://github.com/kevin-bates))
- Fix for the terminal shutdown issue [#446](https://github.com/jupyter-server/jupyter_server/pull/446) ([@afshin](https://github.com/afshin))
- Update the branch filter for the CI badge [#445](https://github.com/jupyter-server/jupyter_server/pull/445) ([@jtpio](https://github.com/jtpio))
- Fix for `UnboundLocalError` in shutdown [#444](https://github.com/jupyter-server/jupyter_server/pull/444) ([@afshin](https://github.com/afshin))
- Update CI badge and fix broken link [#443](https://github.com/jupyter-server/jupyter_server/pull/443) ([@blink1073](https://github.com/blink1073))
- Fix syntax typo [#442](https://github.com/jupyter-server/jupyter_server/pull/442) ([@kiendang](https://github.com/kiendang))
- Port terminal culling from Notebook [#438](https://github.com/jupyter-server/jupyter_server/pull/438) ([@kevin-bates](https://github.com/kevin-bates))
- More complex handling of `open_browser` from extension applications [#433](https://github.com/jupyter-server/jupyter_server/pull/433) ([@afshin](https://github.com/afshin))
- Correction in Changelog [#429](https://github.com/jupyter-server/jupyter_server/pull/429) ([@Zsailer](https://github.com/Zsailer))
- Rename translation function alias [#428](https://github.com/jupyter-server/jupyter_server/pull/428) ([@sngyo](https://github.com/sngyo))

**Contributors to this release:**

([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2021-02-22&to=2021-03-23&type=c))

[@afshin](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aafshin+updated%3A2021-02-22..2021-03-23&type=Issues) | [@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2021-02-22..2021-03-23&type=Issues) | [@codecov-io](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov-io+updated%3A2021-02-22..2021-03-23&type=Issues) | [@jtpio](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ajtpio+updated%3A2021-02-22..2021-03-23&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2021-02-22..2021-03-23&type=Issues) | [@kiendang](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akiendang+updated%3A2021-02-22..2021-03-23&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2021-02-22..2021-03-23&type=Issues) | [@sngyo](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Asngyo+updated%3A2021-02-22..2021-03-23&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2021-02-22..2021-03-23&type=Issues)

## [1.4.1](https://github.com/jupyter-server/jupyter_server/tree/1.4.1) (2021-02-22)
Changelog](https://github.com/jupyter-server/jupyter_server/compare/1.4.0...bc252d33de2f647f98d048dc32888f0a83f005ac) **Merged pull requests:** - Update README.md [#425](https://github.com/jupyter-server/jupyter_server/pull/425) ([@BobinMathew](https://github.com/BobinMathew)) - Solve UnboundLocalError in launch_browser() [#421](https://github.com/jupyter-server/jupyter_server/pull/421) ([@jamesmishra](https://github.com/jamesmishra)) - Add file_to_run to server extension docs [#420](https://github.com/jupyter-server/jupyter_server/pull/420) ([@Zsailer](https://github.com/Zsailer)) - Remove outdated reference to \_jupyter_server_extension_paths in docs [#419](https://github.com/jupyter-server/jupyter_server/pull/419) ([@Zsailer](https://github.com/Zsailer)) **Contributors to this release:** ([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2021-02-18&to=2021-02-22&type=c)) [@jamesmishra](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ajamesmishra+updated%3A2021-02-18..2021-02-22&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2021-02-18..2021-02-22&type=Issues) ## [1.4.0](https://github.com/jupyter-server/jupyter_server/tree/1.4.0) (2021-02-18) [Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/1.3.0...HEAD) **Merged pull requests:** - Add Tests to Distribution [\#416](https://github.com/jupyter-server/jupyter_server/pull/416) ([afshin](https://github.com/afshin)) - Enable extensions to control the file_to_run [\#415](https://github.com/jupyter-server/jupyter_server/pull/415) ([afshin](https://github.com/afshin)) - add missing template for view.html [\#414](https://github.com/jupyter-server/jupyter_server/pull/414) ([minrk](https://github.com/minrk)) - Remove obsoleted asyncio-patch fixture [\#412](https://github.com/jupyter-server/jupyter_server/pull/412) ([kevin-bates](https://github.com/kevin-bates)) - Emit deprecation warning on old name [\#411](https://github.com/jupyter-server/jupyter_server/pull/411) ([fcollonval](https://github.com/fcollonval)) - Correct logging message position [\#410](https://github.com/jupyter-server/jupyter_server/pull/410) ([fcollonval](https://github.com/fcollonval)) - Update 1.3.0 Changelog to include broken 1.2.3 PRs [\#408](https://github.com/jupyter-server/jupyter_server/pull/408) ([kevin-bates](https://github.com/kevin-bates)) - \[Gateway\] Track only this server's kernels [\#407](https://github.com/jupyter-server/jupyter_server/pull/407) ([kevin-bates](https://github.com/kevin-bates)) - Update manager.py: more descriptive warnings when extensions fail to load [\#396](https://github.com/jupyter-server/jupyter_server/pull/396) ([alberti42](https://github.com/alberti42)) ## [1.3.0](https://github.com/jupyter-server/jupyter_server/tree/1.3.0) (2021-02-04) [Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/1.2.2...HEAD) **Merged pull requests (includes those from broken 1.2.3 release):** - Special case ExtensionApp that starts the ServerApp [\#401](https://github.com/jupyter-server/jupyter_server/pull/401) ([afshin](https://github.com/afshin)) - only use deprecated notebook_dir config if root_dir is not set [\#400](https://github.com/jupyter-server/jupyter_server/pull/400) ([minrk](https://github.com/minrk)) - Use async kernel manager by default [\#399](https://github.com/jupyter-server/jupyter_server/pull/399) 
([kevin-bates](https://github.com/kevin-bates)) - Revert Session.username default value change [\#398](https://github.com/jupyter-server/jupyter_server/pull/398) ([mwakaba2](https://github.com/mwakaba2)) - Re-enable default_url in ExtensionApp [\#393](https://github.com/jupyter-server/jupyter_server/pull/393) ([afshin](https://github.com/afshin)) - Enable notebook ContentsManager in jupyter_server [\#392](https://github.com/jupyter-server/jupyter_server/pull/392) ([afshin](https://github.com/afshin)) - Use jupyter_server_config.json as config file in the update password api [\#390](https://github.com/jupyter-server/jupyter_server/pull/390) ([echarles](https://github.com/echarles)) - Increase culling test idle timeout [\#388](https://github.com/jupyter-server/jupyter_server/pull/388) ([kevin-bates](https://github.com/kevin-bates)) - update changelog for 1.2.2 [\#387](https://github.com/jupyter-server/jupyter_server/pull/387) ([Zsailer](https://github.com/Zsailer)) ## [1.2.3](https://github.com/jupyter-server/jupyter_server/tree/1.2.3) (2021-01-29) This was a broken release and was yanked from PyPI. [Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/1.2.2...HEAD) **Merged pull requests:** - Re-enable default_url in ExtensionApp [\#393](https://github.com/jupyter-server/jupyter_server/pull/393) ([afshin](https://github.com/afshin)) - Enable notebook ContentsManager in jupyter_server [\#392](https://github.com/jupyter-server/jupyter_server/pull/392) ([afshin](https://github.com/afshin)) - Use jupyter_server_config.json as config file in the update password api [\#390](https://github.com/jupyter-server/jupyter_server/pull/390) ([echarles](https://github.com/echarles)) - Increase culling test idle timeout [\#388](https://github.com/jupyter-server/jupyter_server/pull/388) ([kevin-bates](https://github.com/kevin-bates)) - update changelog for 1.2.2 [\#387](https://github.com/jupyter-server/jupyter_server/pull/387) ([Zsailer](https://github.com/Zsailer)) ## [1.2.2](https://github.com/jupyter-server/jupyter_server/tree/1.2.2) (2021-01-14) **Merged pull requests:** - Apply missing ensure_async to root session handler methods [\#386](https://github.com/jupyter-server/jupyter_server/pull/386) ([kevin-bates](https://github.com/kevin-bates)) - Update changelog to 1.2.1 [\#385](https://github.com/jupyter-server/jupyter_server/pull/385) ([Zsailer](https://github.com/Zsailer)) - Fix application exit [\#384](https://github.com/jupyter-server/jupyter_server/pull/384) ([afshin](https://github.com/afshin)) - Replace secure_write, is_hidden, exists with jupyter_core's [\#382](https://github.com/jupyter-server/jupyter_server/pull/382) ([kevin-bates](https://github.com/kevin-bates)) - Add --autoreload flag [\#380](https://github.com/jupyter-server/jupyter_server/pull/380) ([afshin](https://github.com/afshin)) ## [1.2.1](https://github.com/jupyter-server/jupyter_server/tree/1.2.1) (2021-01-08) [Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/1.2.0...1.2.1) **Merged pull requests:** - Enable extensions to set debug and open-browser flags [\#379](https://github.com/jupyter-server/jupyter_server/pull/379) ([afshin](https://github.com/afshin)) - Add reconnection to Gateway [\#378](https://github.com/jupyter-server/jupyter_server/pull/378) ([oyvsyo](https://github.com/oyvsyo)) ## [1.2.0](https://github.com/jupyter-server/jupyter_server/tree/1.2.0) (2021-01-07) [Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/1.1.4...1.2.0) **Merged pull requests:** 
- Flip default value for open_browser in extensions [\#377](https://github.com/jupyter-server/jupyter_server/pull/377) ([ajbozarth](https://github.com/ajbozarth)) - Improve Handling of the soft limit on open file handles [\#376](https://github.com/jupyter-server/jupyter_server/pull/376) ([afshin](https://github.com/afshin)) - Handle open_browser trait in ServerApp and ExtensionApp differently [\#375](https://github.com/jupyter-server/jupyter_server/pull/375) ([afshin](https://github.com/afshin)) - Add setting to disable redirect file browser launch [\#374](https://github.com/jupyter-server/jupyter_server/pull/374) ([afshin](https://github.com/afshin)) - Make trust handle use ensure_async [\#373](https://github.com/jupyter-server/jupyter_server/pull/373) ([vidartf](https://github.com/vidartf)) ## [1.1.4](https://github.com/jupyter-server/jupyter_server/tree/1.1.4) (2021-01-04) [Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/1.1.3...1.1.4) **Merged pull requests:** - Update the link to paths documentation [\#371](https://github.com/jupyter-server/jupyter_server/pull/371) ([krassowski](https://github.com/krassowski)) - IPythonHandler -\> JupyterHandler [\#370](https://github.com/jupyter-server/jupyter_server/pull/370) ([krassowski](https://github.com/krassowski)) - use setuptools find_packages, exclude tests, docs and examples from dist [\#368](https://github.com/jupyter-server/jupyter_server/pull/368) ([bollwyvl](https://github.com/bollwyvl)) - Update serverapp.py [\#367](https://github.com/jupyter-server/jupyter_server/pull/367) ([michaelaye](https://github.com/michaelaye)) ## [1.1.3](https://github.com/jupyter-server/jupyter_server/tree/1.1.3) (2020-12-23) [Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/1.1.2...1.1.3) **Merged pull requests:** - Culling: ensure last_activity attr exists before use [\#365](https://github.com/jupyter-server/jupyter_server/pull/365) ([afshin](https://github.com/afshin)) ## [1.1.2](https://github.com/jupyter-server/jupyter_server/tree/1.1.2) (2020-12-21) [Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/1.0.11...1.1.2) **Merged pull requests:** - Nudge kernel with info request until we receive IOPub messages [\#361](https://github.com/jupyter-server/jupyter_server/pull/361) ([SylvainCorlay](https://github.com/SylvainCorlay)) ## [1.1.1](https://github.com/jupyter-server/jupyter_server/tree/1.1.1) (2020-12-16) [Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/1.1.0...1.1.1) **Merged pull requests:** - Fix: await possible async dir_exists method [\#363](https://github.com/jupyter-server/jupyter_server/pull/363) ([mwakaba2](https://github.com/mwakaba2)) ## 1.1.0 (2020-12-11) [Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/1.0.10...1.1.0) **Merged pull requests:** - Restore pytest plugin from pytest-jupyter [\#360](https://github.com/jupyter-server/jupyter_server/pull/360) ([kevin-bates](https://github.com/kevin-bates)) - Fix upgrade packaging dependencies build step [\#354](https://github.com/jupyter-server/jupyter_server/pull/354) ([mwakaba2](https://github.com/mwakaba2)) - Await \_connect and inline read_messages callback to \_connect [\#350](https://github.com/jupyter-server/jupyter_server/pull/350) ([ricklamers](https://github.com/ricklamers)) - Update release instructions and dev version [\#348](https://github.com/jupyter-server/jupyter_server/pull/348) ([kevin-bates](https://github.com/kevin-bates)) - Fix test_trailing_slash 
[\#346](https://github.com/jupyter-server/jupyter_server/pull/346) ([kevin-bates](https://github.com/kevin-bates)) - Apply security advisory fix to master [\#345](https://github.com/jupyter-server/jupyter_server/pull/345) ([kevin-bates](https://github.com/kevin-bates)) - Allow toggling auth for prometheus metrics [\#344](https://github.com/jupyter-server/jupyter_server/pull/344) ([yuvipanda](https://github.com/yuvipanda)) - Port Notebook PRs 5565 and 5588 - terminal shell heuristics [\#343](https://github.com/jupyter-server/jupyter_server/pull/343) ([kevin-bates](https://github.com/kevin-bates)) - Port gateway updates from notebook \(PRs 5317 and 5484\) [\#341](https://github.com/jupyter-server/jupyter_server/pull/341) ([kevin-bates](https://github.com/kevin-bates)) - add check_origin handler to gateway WebSocketChannelsHandler [\#340](https://github.com/jupyter-server/jupyter_server/pull/340) ([ricklamers](https://github.com/ricklamers)) - Remove pytest11 entrypoint and plugin, require tornado 6.1, remove asyncio patch, CI work [\#339](https://github.com/jupyter-server/jupyter_server/pull/339) ([bollwyvl](https://github.com/bollwyvl)) - Switch fixtures to use those in pytest-jupyter to avoid collisions [\#335](https://github.com/jupyter-server/jupyter_server/pull/335) ([kevin-bates](https://github.com/kevin-bates)) - Enable CodeQL runs on all pushed branches [\#333](https://github.com/jupyter-server/jupyter_server/pull/333) ([kevin-bates](https://github.com/kevin-bates)) - Asynchronous Contents API [\#324](https://github.com/jupyter-server/jupyter_server/pull/324) ([mwakaba2](https://github.com/mwakaba2)) ## 1.0.6 (2020-11-18) 1.0.6 is a security release, fixing one vulnerability: ### Changed - Fix open redirect vulnerability GHSA-grfj-wjv9-4f9v (CVE-2020-26232) ## 1.0 (2020-09-18) ### Added - Added a basic, styled `login.html` template. ([220](https://github.com/jupyter/jupyter_server/pull/220), [295](https://github.com/jupyter/jupyter_server/pull/295)) - Added new extension manager API for handling server extensions. ([248](https://github.com/jupyter/jupyter_server/pull/248), [265](https://github.com/jupyter/jupyter_server/pull/265), [275](https://github.com/jupyter/jupyter_server/pull/275), [303](https://github.com/jupyter/jupyter_server/pull/303)) - The favicon and Jupyter logo are now available under jupyter_server's static namespace. ([284](https://github.com/jupyter/jupyter_server/pull/284)) ### Changed - `load_jupyter_server_extension` should be renamed to `_load_jupyter_server_extension` in server extensions. The server now throws a warning when the old name is used. ([213](https://github.com/jupyter/jupyter_server/pull/213)) - Docs for server extensions now recommend using the `authenticated` decorator for handlers. ([219](https://github.com/jupyter/jupyter_server/pull/219)) - `_load_jupyter_server_paths` should be renamed to `_load_jupyter_server_points` in server extensions. ([277](https://github.com/jupyter/jupyter_server/pull/277)) - `static_url_prefix` in ExtensionApps is now a configurable trait. ([289](https://github.com/jupyter/jupyter_server/pull/289)) - `extension_name` trait was removed in favor of `name`. ([232](https://github.com/jupyter/jupyter_server/pull/232)) - Dropped support for Python 3.5. ([296](https://github.com/jupyter/jupyter_server/pull/296)) - Made the `config_dir_name` trait configurable in `ConfigManager`. ([297](https://github.com/jupyter/jupyter_server/pull/297)) ### Removed - Removed ipykernel as a dependency of jupyter_server.
([255](https://github.com/jupyter/jupyter_server/pull/255)) ### Fixed - Prevent a re-definition of prometheus metrics if `notebook` package already imports them. ([#210](https://github.com/jupyter/jupyter_server/pull/210)) - Fixed `terminals` REST API unit tests that weren't shutting down properly. ([221](https://github.com/jupyter/jupyter_server/pull/221)) - Fixed jupyter_server on Windows for Python < 3.7. Added patch to handle subprocess cleanup. ([240](https://github.com/jupyter/jupyter_server/pull/240)) - `base_url` was being duplicated when getting a url path from the `ServerApp`. ([280](https://github.com/jupyter/jupyter_server/pull/280)) - Extension URLs are now properly prefixed with `base_url`. Previously, all `static` paths were not. ([285](https://github.com/jupyter/jupyter_server/pull/285)) - Changed ExtensionApp mixin to inherit from `HasTraits`. This broke in traitlets 5.0. ([294](https://github.com/jupyter/jupyter_server/pull/294)) - Replaces `urlparse` with `url_path_join` to prevent URL squashing issues. ([304](https://github.com/jupyter/jupyter_server/pull/304)) ## [0.3] - 2020-04-22 ### Added - ([#191](https://github.com/jupyter/jupyter_server/pull/191)) Async kernel management is now possible using the `AsyncKernelManager` from `jupyter_client`. - ([#201](https://github.com/jupyter/jupyter_server/pull/201)) Parameters can now be passed to new terminals created by the `terminals` REST API. ### Changed - ([#196](https://github.com/jupyter/jupyter_server/pull/196)) Documentation was rewritten + refactored to use pydata_sphinx_theme. - ([#174](https://github.com/jupyter/jupyter_server/pull/174)) `ExtensionHandler` was changed to a Mixin class, i.e. `ExtensionHandlerMixin`. ### Removed - ([#194](https://github.com/jupyter/jupyter_server/pull/194)) The bundlerextension entry point was removed. ## [0.2.1] - 2020-01-10 ### Added - **pytest-plugin** for Jupyter Server. - Allows one to write async/await syntax in test functions. - Some particularly useful fixtures include: - `serverapp`: a default ServerApp instance that handles setup+teardown. - `configurable_serverapp`: a function that returns a ServerApp instance. - `fetch`: an awaitable function that makes requests to the server API from tests. - `create_notebook`: a function that writes a notebook to a given temporary file path. ## [0.2.0] - 2019-12-19 ### Added - `extension` submodule ([#48](https://github.com/jupyter/jupyter_server/pull/48)) - ExtensionApp - configurable JupyterApp-subclass for server extensions - Most useful for Jupyter frontends, like Notebook, JupyterLab, nteract, Voila, etc. - Launch with entrypoints - Configure from file or CLI - Add custom templates, static assets, handlers, etc. - Static assets are served behind a `/static/` endpoint. - Run server extensions in "standalone mode" ([#70](https://github.com/jupyter/jupyter_server/pull/70) and [#76](https://github.com/jupyter/jupyter_server/pull/76)) - ExtensionHandler - tornado handlers for extensions. - Finds static assets at `/static/` ### Changed - The `jupyter serverextension` entrypoint has been changed to `jupyter server extension`. - The `toggle_jupyter_server` and `validate_jupyter_server` functions no longer take a Logger object as an argument.
- Changed testing framework from nosetests to pytest ([#152](https://github.com/jupyter/jupyter_server/pull/152)) - Depend on pytest-tornasync extension for handling tornado/asyncio eventloop - Depend on pytest-console-scripts for testing CLI entrypoints - Added GitHub Actions as a testing framework alongside Travis and Azure ([#146](https://github.com/jupyter/jupyter_server/pull/146)) ### Removed - Removed the option to update `root_dir` trait in FileContentsManager and MappingKernelManager in ServerApp ([#135](https://github.com/jupyter/jupyter_server/pull/135)) ### Fixed - Synced Jupyter Server with Notebook PRs in batches (ended on 2019-09-27) - [Batch 1](https://github.com/jupyter/jupyter_server/pull/95) - [Batch 2](https://github.com/jupyter/jupyter_server/pull/97) - [Batch 3](https://github.com/jupyter/jupyter_server/pull/98) - [Batch 4](https://github.com/jupyter/jupyter_server/pull/99) - [Batch 5](https://github.com/jupyter/jupyter_server/pull/103) - [Batch 6](https://github.com/jupyter/jupyter_server/pull/104) - [Batch 7](https://github.com/jupyter/jupyter_server/pull/105) - [Batch 8](https://github.com/jupyter/jupyter_server/pull/106) ### Security - Added a `secure_write` function for cookie/token saves ([#77](https://github.com/jupyter/jupyter_server/pull/77)) jupyter_server-1.13.1/CONTRIBUTING.rst000066400000000000000000000113041415445537200173370ustar00rootroot00000000000000General Jupyter contributor guidelines ====================================== If you're reading this section, you're probably interested in contributing to Jupyter. Welcome and thanks for your interest in contributing! Please take a look at the Contributor documentation, familiarize yourself with using the Jupyter Server, and introduce yourself on the mailing list and share what area of the project you are interested in working on. For general documentation about contributing to Jupyter projects, see the `Project Jupyter Contributor Documentation`__. __ https://jupyter.readthedocs.io/en/latest/contributing/content-contributor.html Setting Up a Development Environment ==================================== Installing the Jupyter Server ----------------------------- The development version of the server requires `node `_ and `pip `_. Once you have installed the dependencies mentioned above, use the following steps:: pip install --upgrade setuptools pip git clone https://github.com/jupyter/jupyter_server cd jupyter_server pip install -e . If you are using a system-wide Python installation and you only want to install the server for you, you can add ``--user`` to the install commands. Once you have done this, you can launch the master branch of Jupyter Server from any directory in your system with:: jupyter server Code Styling ----------------------------- `jupyter_server` has adopted automatic code formatting, so you shouldn't need to worry too much about your code style. As long as your code is valid, the pre-commit hook should take care of how it should look. To install `pre-commit`, run the following:: pip install pre-commit pre-commit install You can invoke the pre-commit hook by hand at any time with:: pre-commit run which should run any autoformatting on your code and tell you about any errors it couldn't fix automatically. You may also install `black integration <https://github.com/psf/black#editor-integration>`_ into your text editor to format code automatically.
If you have already committed files before setting up the pre-commit hook with ``pre-commit install``, you can fix everything up using ``pre-commit run --all-files``. You need to make the fixing commit yourself after that. Troubleshooting the Installation -------------------------------- If your Jupyter Server does not seem to be running in dev mode, it's possible that you are running other instances of Jupyter Server. You can try the following steps: 1. Uninstall all instances of the jupyter_server package. These include any installations you made using pip or conda. 2. Run ``python3 -m pip install -e .`` in the jupyter_server repository to install jupyter_server from there. 3. Run ``npm run build`` to make sure the JavaScript and CSS are updated and compiled. 4. Launch with ``python3 -m jupyter_server --port 8989``, and check that the browser is pointing to ``localhost:8989`` (rather than the default 8888). You don't have to use port 8989; any port that is neither the default nor already in use should be fine. 5. Verify the installation with the steps in the previous section. Running Tests ============= Install dependencies:: pip install -e .[test] pip install -e examples/simple # to test the examples To run the Python tests, use:: pytest jupyter_server pytest examples/simple # to test the examples Building the Docs ================= To build the documentation you'll need `Sphinx `_, `pandoc `_ and a few other packages. To install (and activate) a `conda environment`_ named ``server_docs`` containing all the necessary packages (except pandoc), use:: conda env create -f docs/environment.yml source activate server_docs # Linux and OS X activate server_docs # Windows .. _conda environment: https://conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html#creating-an-environment-from-an-environment-yml-file If you want to install the necessary packages with ``pip`` instead:: pip install -r docs/doc-requirements.txt Once you have installed the required packages, you can build the docs with:: cd docs make html After that, the generated HTML files will be available at ``build/html/index.html``. You may view the docs in your browser. You can automatically check if all hyperlinks are still valid:: make linkcheck Windows users can find ``make.bat`` in the ``docs`` folder. You should also have a look at the `Project Jupyter Documentation Guide`__. __ https://jupyter.readthedocs.io/en/latest/contributing/content-contributor.html jupyter_server-1.13.1/COPYING.md000066400000000000000000000055051415445537200163360ustar00rootroot00000000000000# Licensing terms This project is licensed under the terms of the Modified BSD License (also known as New or Revised or 3-Clause BSD), as follows: - Copyright (c) 2001-2015, IPython Development Team - Copyright (c) 2015-, Jupyter Development Team All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
Neither the name of the Jupyter Development Team nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ## About the Jupyter Development Team The Jupyter Development Team is the set of all contributors to the Jupyter project. This includes all of the Jupyter subprojects. The core team that coordinates development on GitHub can be found here: https://github.com/jupyter/. ## Our Copyright Policy Jupyter uses a shared copyright model. Each contributor maintains copyright over their contributions to Jupyter. But, it is important to note that these contributions are typically only changes to the repositories. Thus, the Jupyter source code, in its entirety is not the copyright of any single person or institution. Instead, it is the collective copyright of the entire Jupyter Development Team. If individual contributors want to maintain a record of what changes/contributions they have specific copyright on, they should indicate their copyright in the commit message of the change, when they commit the change to one of the Jupyter repositories. With this in mind, the following banner should be used in any source code file to indicate the copyright and license terms: # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. jupyter_server-1.13.1/MANIFEST.in000066400000000000000000000011041415445537200164310ustar00rootroot00000000000000include COPYING.md include CONTRIBUTING.rst include README.md include RELEASE.md include CHANGELOG.md include setupbase.py include package.json # include everything in package_data recursive-include jupyter_server * # Documentation graft docs exclude docs/\#* # Examples graft examples # docs subdirs we want to skip prune docs/build prune docs/gh-pages prune docs/dist # Patterns to exclude from any directory global-exclude *~ global-exclude *.pyc global-exclude *.pyo prune .git prune **/.ipynb_checkpoints prune **/.pytest_cache prune **/.coverage prune **/.pytest_cache jupyter_server-1.13.1/README.md000066400000000000000000000045411415445537200161620ustar00rootroot00000000000000# Jupyter Server [![Build Status](https://github.com/jupyter/jupyter_server/workflows/CI/badge.svg?query=branch%3Amaster++)](https://github.com/jupyter-server/jupyter_server/actions?query=branch%3Amaster++) [![Documentation Status](https://readthedocs.org/projects/jupyter-server/badge/?version=latest)](http://jupyter-server.readthedocs.io/en/latest/?badge=latest) The Jupyter Server provides the backend (i.e. the core services, APIs, and REST endpoints) for Jupyter web applications like Jupyter notebook, JupyterLab, and Voila. 
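As a rough, illustrative sketch of what that backend looks like from a client's point of view, the snippet below uses the third-party `requests` package to list files through the server's REST contents API. It assumes a server already running at `localhost:8888` with its API token exported in a `JUPYTER_TOKEN` environment variable; both are assumptions to adjust for your setup:

```python
# Illustrative sketch only: query a running Jupyter Server's REST API.
# Assumptions: the server listens on http://localhost:8888 and the
# JUPYTER_TOKEN environment variable holds a valid token for it.
import os

import requests

base_url = "http://localhost:8888"
token = os.environ["JUPYTER_TOKEN"]

# GET /api/contents returns a directory model; its "content" field is a
# list of models describing the entries in the server's root directory.
resp = requests.get(
    f"{base_url}/api/contents",
    headers={"Authorization": f"token {token}"},
)
resp.raise_for_status()
for entry in resp.json()["content"]:
    print(entry["type"], entry["path"])
```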
For more information, read our [documentation here](http://jupyter-server.readthedocs.io/en/latest/?badge=latest). ## Installation and Basic usage To install the latest release locally, make sure you have [pip installed](https://pip.readthedocs.io/en/stable/installing/) and run: pip install jupyter_server Jupyter Server currently supports Python>=3.6 on Linux, OSX and Windows. ### Versioning and Branches If Jupyter Server is a dependency of your project/application, it is important that you pin it to a version that works for your application. Currently, Jupyter Server only has minor and patch versions. Different minor versions likely include API changes, while patch versions do not change the API. When a new minor version is released on PyPI, a branch for that version will be created in this repository, and the version of the master branch will be bumped to the next minor version number. That way, the master branch always reflects the latest unreleased version. To see the changes between releases, check out the [CHANGELOG](https://github.com/jupyter/jupyter_server/blob/master/CHANGELOG.md). ## Usage - Running Jupyter Server ### Running in a local installation Launch with: jupyter server ### Testing See [CONTRIBUTING](https://github.com/jupyter-server/jupyter_server/blob/master/CONTRIBUTING.rst#running-tests). ## Contributing If you are interested in contributing to the project, see [`CONTRIBUTING.rst`](CONTRIBUTING.rst). ## Team Meetings and Roadmap - When: Thursdays [8:00am, Pacific time](https://www.thetimezoneconverter.com/?t=8%3A00%20am&tz=San%20Francisco&) - Where: [Jovyan Zoom](https://zoom.us/my/jovyan?pwd=c0JZTHlNdS9Sek9vdzR3aTJ4SzFTQT09) - What: [Meeting notes](https://github.com/jupyter-server/team-compass/issues/4) See our tentative [roadmap here](https://github.com/jupyter/jupyter_server/issues/127). jupyter_server-1.13.1/RELEASE.md000066400000000000000000000015171415445537200163050ustar00rootroot00000000000000# Making a Jupyter Server Release ## Using `jupyter_releaser` The recommended way to make a release is to use [`jupyter_releaser`](https://github.com/jupyter-server/jupyter_releaser#checklist-for-adoption). ## Manual Release To create a manual release, perform the following steps: ### Set up ```bash pip install tbump twine build git pull origin $(git branch --show-current) git clean -dffx ``` ### Update the version and apply the tag ```bash echo "Enter new version" read script_version tbump ${script_version} ``` ### Build the artifacts ```bash rm -rf dist python -m build . ``` ### Update the version back to dev ```bash echo "Enter dev version" read dev_version tbump ${dev_version} --no-tag git push origin $(git branch --show-current) ``` ### Publish the artifacts to pypi ```bash twine check dist/* twine upload dist/* ``` jupyter_server-1.13.1/codecov.yml000066400000000000000000000002101415445537200170360ustar00rootroot00000000000000coverage: status: project: default: target: auto threshold: 10 patch: default: target: 0% jupyter_server-1.13.1/docs/000077500000000000000000000000001415445537200156275ustar00rootroot00000000000000jupyter_server-1.13.1/docs/Makefile000066400000000000000000000172521415445537200172760ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found.
Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage spelling gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " applehelp to make an Apple Help Book" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" @echo " coverage to run coverage check of the documentation (if enabled)" @echo " spelling to spell check the documentation" clean: rm -rf $(BUILDDIR)/* rm -rf source/config.rst html: source/config.rst $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." source/config.rst: python3 autogen_config.py @echo "Created docs for config options" dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." 
qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/JupyterNotebook.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/JupyterNotebook.qhc" applehelp: $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp @echo @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." @echo "N.B. You won't be able to view it unless you put it in" \ "~/Library/Documentation/Help or install it in your application" \ "bundle." devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/JupyterNotebook" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/JupyterNotebook" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." spelling: $(SPHINXBUILD) -b spelling $(ALLSPHINXOPTS) $(BUILDDIR)/spelling @echo "Spell check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/spelling/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." 
coverage: $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage @echo "Testing of coverage in the sources finished, look at the " \ "results in $(BUILDDIR)/coverage/python.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." jupyter_server-1.13.1/docs/README.md000066400000000000000000000002451415445537200171070ustar00rootroot00000000000000# Jupyter Server Docs Sources Read [this page](https://jupyter-server.readthedocs.io/en/latest/contributors/contributing.html#building-the-docs) to build the docs. jupyter_server-1.13.1/docs/autogen_config.py000066400000000000000000000021471415445537200211740ustar00rootroot00000000000000#!/usr/bin/env python import os from jupyter_server.serverapp import ServerApp header = """\ .. _other-full-config: Config file and command line options ==================================== The Jupyter Server can be run with a variety of command line arguments. A list of available options can be found below in the :ref:`options section `. Defaults for these options can also be set by creating a file named ``jupyter_server_config.py`` in your Jupyter folder. The Jupyter folder is in your home directory, ``~/.jupyter``. To create a ``jupyter_server_config.py`` file, with all the defaults commented out, you can use the following command line:: $ jupyter server --generate-config .. _options: Options ------- This list of options can be generated by running the following and hitting enter:: $ jupyter server --help-all """ try: destination = os.path.join(os.path.dirname(__file__), "source/other/full-config.rst") except: destination = os.path.join(os.getcwd(), "full-config.rst") with open(destination, "w") as f: f.write(header) f.write(ServerApp().document_config_options()) jupyter_server-1.13.1/docs/doc-requirements.txt000066400000000000000000000002511415445537200216540ustar00rootroot00000000000000ipykernel jinja2 jupyter_client myst-parser nbformat prometheus_client pydata_sphinx_theme Send2Trash sphinxcontrib-openapi sphinxcontrib_github_alt sphinxemoji tornado jupyter_server-1.13.1/docs/environment.yml000066400000000000000000000001661415445537200207210ustar00rootroot00000000000000name: jupyter_server_docs dependencies: - nodejs=14 - python=3.8 - pip - pip: - -r doc-requirements.txt jupyter_server-1.13.1/docs/make.bat000066400000000000000000000161471415445537200172450ustar00rootroot00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source set I18NSPHINXOPTS=%SPHINXOPTS% source if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. 
latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. xml to make Docutils-native XML files echo. pseudoxml to make pseudoxml-XML files for display purposes echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled echo. coverage to run coverage check of the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) REM Check if sphinx-build is available and fallback to Python version if any %SPHINXBUILD% 2> nul if errorlevel 9009 goto sphinx_python goto sphinx_ok :sphinx_python set SPHINXBUILD=python -m sphinx.__init__ %SPHINXBUILD% 2> nul if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) :sphinx_ok if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\JupyterNotebook.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\JupyterNotebook.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdf" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf cd %~dp0 echo. 
echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdfja" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf-ja cd %~dp0 echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) if "%1" == "coverage" ( %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage if errorlevel 1 exit /b 1 echo. echo.Testing of coverage in the sources finished, look at the ^ results in %BUILDDIR%/coverage/python.txt. goto end ) if "%1" == "xml" ( %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml if errorlevel 1 exit /b 1 echo. echo.Build finished. The XML files are in %BUILDDIR%/xml. goto end ) if "%1" == "pseudoxml" ( %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml if errorlevel 1 exit /b 1 echo. echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. goto end ) :end jupyter_server-1.13.1/docs/source/000077500000000000000000000000001415445537200171275ustar00rootroot00000000000000jupyter_server-1.13.1/docs/source/_static/000077500000000000000000000000001415445537200205555ustar00rootroot00000000000000jupyter_server-1.13.1/docs/source/_static/.gitkeep000066400000000000000000000000001415445537200221740ustar00rootroot00000000000000jupyter_server-1.13.1/docs/source/_static/jupyter_server_logo.svg000066400000000000000000000243201415445537200254070ustar00rootroot00000000000000 image/svg+xml logo.svg logo.svg Created using Figma 0.90 server jupyter_server-1.13.1/docs/source/conf.py000066400000000000000000000306171415445537200204350ustar00rootroot00000000000000#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Jupyter Server documentation build configuration file, created by # sphinx-quickstart on Mon Apr 13 09:51:11 2015. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. 
import os import os.path as osp import shutil import sys HERE = osp.abspath(osp.dirname(__file__)) # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # DEBUG for RTD print("DEBUG:: sys.path") print("================") for item in sys.path: print(item) # add repo root to sys.path # here = root/docs/source here = os.path.abspath(os.path.dirname(__file__)) repo_root = os.path.dirname(os.path.dirname(here)) sys.path.insert(0, repo_root) print("repo_root") print("=====================") print(repo_root) # DEBUG for post insert on RTD print("DEBUG:: Post insert to sys.path") print("===============================") for item in sys.path: print(item) # Check if docs are being built by ReadTheDocs # If so, generate a config.rst file and populate it with documentation about # configuration options if os.environ.get("READTHEDOCS", ""): # Readthedocs doesn't run our Makefile, so we do this to force it to generate # the config docs. with open("../autogen_config.py") as f: exec(compile(f.read(), "../autogen_config.py", "exec"), {}) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ "myst_parser", "sphinx.ext.autodoc", "sphinx.ext.doctest", "sphinx.ext.intersphinx", "sphinx.ext.autosummary", "sphinx.ext.mathjax", "IPython.sphinxext.ipython_console_highlighting", "sphinxcontrib_github_alt", "sphinxcontrib.openapi", "sphinxemoji.sphinxemoji", ] myst_enable_extensions = ["html_image"] # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] source_suffix = [".rst", ".ipynb"] # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = "index" # General information about the project. project = "Jupyter Server" copyright = "2020, Jupyter Team, https://jupyter.org" author = "The Jupyter Team" # ghissue config github_project_url = "https://github.com/jupyter/jupyter_server" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # _version_py = "../../jupyter_server/_version.py" version_ns = {} exec(compile(open(_version_py).read(), _version_py, "exec"), version_ns) # The short X.Y version. version = "%i.%i" % version_ns["version_info"][:2] # The full version, including alpha/beta/rc tags. release = version_ns["__version__"] # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. 
# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all # documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = "default" # highlight_language = 'python3' # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. # keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False # # Add custom note for each doc page # rst_prolog = "" # rst_prolog += """ # .. important:: # This documentation covers Jupyter Server, an **early developer preview**, # and is not suitable for general usage yet. Features and implementation are # subject to change. # For production use cases, please use the stable notebook server in the # `Jupyter Notebook repo `_ # and `Jupyter Notebook documentation `_. # """ # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = 'sphinx_rtd_theme' html_theme = "pydata_sphinx_theme" html_logo = "_static/jupyter_server_logo.svg" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". # NOTE: Sphinx's 'make html' builder will throw a warning about an unfound # _static directory. Do not remove or comment out html_static_path # since it is needed to properly generate _static in the build directory html_static_path = ["_static"] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. # html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. 
# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr' # html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # Now only 'ja' uses this config value # html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. # html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. htmlhelp_basename = "JupyterServerdoc" # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', # Latex figure (float) alignment #'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ( master_doc, "JupyterServer.tex", "Jupyter Server Documentation", "https://jupyter.org", "manual", ), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [(master_doc, "jupyterserver", "Jupyter Server Documentation", [author], 1)] # If true, show URL addresses after external links. # man_show_urls = False # -- Options for link checks ---------------------------------------------- linkcheck_ignore = ["http://127\.0\.0\.1/*"] # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. 
List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ( master_doc, "JupyterServer", "Jupyter Server Documentation", author, "JupyterServer", "One line description of project.", "Miscellaneous", ), ] # Documents to append as an appendix to all manuals. # texinfo_appendices = [] # If false, no module index is generated. # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. # texinfo_no_detailmenu = False intersphinx_mapping = { "ipython": ("https://ipython.readthedocs.io/en/stable/", None), "nbconvert": ("https://nbconvert.readthedocs.io/en/latest/", None), "nbformat": ("https://nbformat.readthedocs.io/en/latest/", None), "jupyter": ("https://jupyter.readthedocs.io/en/latest/", None), } spelling_lang = "en_US" spelling_word_list_filename = "spelling_wordlist.txt" # import before any doc is built, so _ is guaranteed to be injected import jupyter_server.transutils # pylint: disable=unused-import def setup(app): dest = osp.join(HERE, "other", "changelog.md") shutil.copy(osp.join(HERE, "..", "..", "CHANGELOG.md"), dest) jupyter_server-1.13.1/docs/source/contributors/000077500000000000000000000000001415445537200216645ustar00rootroot00000000000000jupyter_server-1.13.1/docs/source/contributors/contributing.rst000066400000000000000000000000721415445537200251240ustar00rootroot00000000000000.. highlight:: sh .. include:: ../../../CONTRIBUTING.rst jupyter_server-1.13.1/docs/source/contributors/index.rst000066400000000000000000000004251415445537200235260ustar00rootroot00000000000000Documentation for Contributors ------------------------------ These pages target people who are interested in contributing directly to the Jupyter Server Project. .. toctree:: :caption: Contributors :maxdepth: 1 :name: contributors team-meetings contributing jupyter_server-1.13.1/docs/source/contributors/team-meetings.rst000066400000000000000000000015341415445537200251600ustar00rootroot00000000000000.. _contributors-team-meetings-roadmap-calendar: Team Meetings, Road Map and Calendar ==================================== Many of the lead Jupyter Server developers meet weekly over Zoom. These meetings are open to everyone. To see when the next meeting is happening, watch this Github issue: https://github.com/jupyter/jupyter_server/issues/126 Also check out Jupyter Server's roadmap where we track future plans for Jupyter Server: https://github.com/jupyter/jupyter_server/issues/127 Jupyter Calendar: .. raw:: html jupyter_server-1.13.1/docs/source/developers/000077500000000000000000000000001415445537200212775ustar00rootroot00000000000000jupyter_server-1.13.1/docs/source/developers/contents.rst000066400000000000000000000264001415445537200236700ustar00rootroot00000000000000.. _contents_api: Contents API ============ .. currentmodule:: jupyter_server.services.contents The Jupyter Notebook web application provides a graphical interface for creating, opening, renaming, and deleting files in a virtual filesystem. The :class:`~manager.ContentsManager` class defines an abstract API for translating these interactions into operations on a particular storage medium. The default implementation, :class:`~filemanager.FileContentsManager`, uses the local filesystem of the server for storage and straightforwardly serializes notebooks into JSON. Users can override these behaviors by supplying custom subclasses of ContentsManager. 
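For example, pointing the server at a custom implementation is a single configuration change. A minimal sketch of :file:`jupyter_server_config.py`, where the dotted import path ``mypackage.contents.MyContentsManager`` is a hypothetical package, not part of Jupyter Server:

.. code-block:: python

    # jupyter_server_config.py (sketch; the dotted path is illustrative)
    c.ServerApp.contents_manager_class = "mypackage.contents.MyContentsManager"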
This section describes the interface implemented by ContentsManager subclasses. We refer to this interface as the **Contents API**. Data Model ---------- .. currentmodule:: jupyter_server.services.contents.manager Filesystem Entities ~~~~~~~~~~~~~~~~~~~ .. _notebook models: ContentsManager methods represent virtual filesystem entities as dictionaries, which we refer to as **models**. Models may contain the following entries: +--------------------+-----------+------------------------------+ | Key | Type |Info | +====================+===========+==============================+ |**name** |unicode |Basename of the entity. | +--------------------+-----------+------------------------------+ |**path** |unicode |Full | | | |(:ref:`API-style`) | | | |path to the entity. | +--------------------+-----------+------------------------------+ |**type** |unicode |The entity type. One of | | | |``"notebook"``, ``"file"`` or | | | |``"directory"``. | +--------------------+-----------+------------------------------+ |**created** |datetime |Creation date of the entity. | +--------------------+-----------+------------------------------+ |**last_modified** |datetime |Last modified date of the | | | |entity. | +--------------------+-----------+------------------------------+ |**content** |variable |The "content" of the entity. | | | |(:ref:`See | | | |Below`) | +--------------------+-----------+------------------------------+ |**mimetype** |unicode or |The mimetype of ``content``, | | |``None`` |if any. (:ref:`See | | | |Below`) | +--------------------+-----------+------------------------------+ |**format** |unicode or |The format of ``content``, | | |``None`` |if any. (:ref:`See | | | |Below`) | +--------------------+-----------+------------------------------+ .. _modelcontent: Certain model fields vary in structure depending on the ``type`` field of the model. There are three model types: **notebook**, **file**, and **directory**. - ``notebook`` models - The ``format`` field is always ``"json"``. - The ``mimetype`` field is always ``None``. - The ``content`` field contains a :class:`nbformat.notebooknode.NotebookNode` representing the .ipynb file represented by the model. See the `NBFormat`_ documentation for a full description. - ``file`` models - The ``format`` field is either ``"text"`` or ``"base64"``. - The ``mimetype`` field is ``text/plain`` for text-format models and ``application/octet-stream`` for base64-format models. - The ``content`` field is always of type ``unicode``. For text-format file models, ``content`` simply contains the file's bytes after decoding as UTF-8. Non-text (``base64``) files are read as bytes, base64 encoded, and then decoded as UTF-8. - ``directory`` models - The ``format`` field is always ``"json"``. - The ``mimetype`` field is always ``None``. - The ``content`` field contains a list of :ref:`content-free` models representing the entities in the directory. .. note:: .. _contentfree: In certain circumstances, we don't need the full content of an entity to complete a Contents API request. In such cases, we omit the ``mimetype``, ``content``, and ``format`` keys from the model. This most commonly occurs when listing a directory, in which circumstance we represent files within the directory as content-less models to avoid having to recursively traverse and serialize the entire filesystem. **Sample Models** .. 
code-block:: python # Notebook Model with Content { 'content': { 'metadata': {}, 'nbformat': 4, 'nbformat_minor': 0, 'cells': [ { 'cell_type': 'markdown', 'metadata': {}, 'source': 'Some **Markdown**', }, ], }, 'created': datetime(2015, 7, 25, 19, 50, 19, 19865), 'format': 'json', 'last_modified': datetime(2015, 7, 25, 19, 50, 19, 19865), 'mimetype': None, 'name': 'a.ipynb', 'path': 'foo/a.ipynb', 'type': 'notebook', 'writable': True, } # Notebook Model without Content { 'content': None, 'created': datetime.datetime(2015, 7, 25, 20, 17, 33, 271931), 'format': None, 'last_modified': datetime.datetime(2015, 7, 25, 20, 17, 33, 271931), 'mimetype': None, 'name': 'a.ipynb', 'path': 'foo/a.ipynb', 'type': 'notebook', 'writable': True } API Paths ~~~~~~~~~ .. _apipaths: ContentsManager methods represent the locations of filesystem resources as **API-style paths**. Such paths are interpreted as relative to the root directory of the notebook server. For compatibility across systems, the following guarantees are made: * Paths are always ``unicode``, not ``bytes``. * Paths are not URL-escaped. * Paths are always forward-slash (/) delimited, even on Windows. * Leading and trailing slashes are stripped. For example, ``/foo/bar/buzz/`` becomes ``foo/bar/buzz``. * The empty string (``""``) represents the root directory. Writing a Custom ContentsManager -------------------------------- The default ContentsManager is designed for users running the notebook as an application on a personal computer. It stores notebooks as .ipynb files on the local filesystem, and it maps files and directories in the Notebook UI to files and directories on disk. It is possible to override how notebooks are stored by implementing your own custom subclass of ``ContentsManager``. For example, if you deploy the notebook in a context where you don't trust or don't have access to the filesystem of the notebook server, it's possible to write your own ContentsManager that stores notebooks and files in a database. Required Methods ~~~~~~~~~~~~~~~~ A minimal complete implementation of a custom :class:`~manager.ContentsManager` must implement the following methods: .. autosummary:: ContentsManager.get ContentsManager.save ContentsManager.delete_file ContentsManager.rename_file ContentsManager.file_exists ContentsManager.dir_exists ContentsManager.is_hidden You may be required to specify a Checkpoints object, as the default one, ``FileCheckpoints``, could be incompatible with your custom ContentsManager. Customizing Checkpoints ----------------------- .. currentmodule:: jupyter_server.services.contents.checkpoints Customized Checkpoint definitions allows behavior to be altered and extended. The ``Checkpoints`` and ``GenericCheckpointsMixin`` classes (from :mod:`jupyter_server.services.contents.checkpoints`) have reusable code and are intended to be used together, but require the following methods to be implemented. .. autosummary:: Checkpoints.rename_checkpoint Checkpoints.list_checkpoints Checkpoints.delete_checkpoint GenericCheckpointsMixin.create_file_checkpoint GenericCheckpointsMixin.create_notebook_checkpoint GenericCheckpointsMixin.get_file_checkpoint GenericCheckpointsMixin.get_notebook_checkpoint No-op example ~~~~~~~~~~~~~ Here is an example of a no-op checkpoints object - note the mixin comes first. The docstrings indicate what each method should do or return for a more complete implementation. .. 
code-block:: python class NoOpCheckpoints(GenericCheckpointsMixin, Checkpoints): """requires the following methods:""" def create_file_checkpoint(self, content, format, path): """ -> checkpoint model""" def create_notebook_checkpoint(self, nb, path): """ -> checkpoint model""" def get_file_checkpoint(self, checkpoint_id, path): """ -> {'type': 'file', 'content': , 'format': {'text', 'base64'}}""" def get_notebook_checkpoint(self, checkpoint_id, path): """ -> {'type': 'notebook', 'content': }""" def delete_checkpoint(self, checkpoint_id, path): """deletes a checkpoint for a file""" def list_checkpoints(self, path): """returns a list of checkpoint models for a given file, default just does one per file """ return [] def rename_checkpoint(self, checkpoint_id, old_path, new_path): """renames checkpoint from old path to new path""" See ``GenericFileCheckpoints`` in :mod:`jupyter_server.services.contents.filecheckpoints` for a more complete example. Testing ------- .. currentmodule:: jupyter_server.services.contents.tests :mod:`jupyter_server.services.contents.tests` includes several test suites written against the abstract Contents API. This means that an excellent way to test a new ContentsManager subclass is to subclass our tests to make them use your ContentsManager. .. note:: PGContents_ is an example of a complete implementation of a custom ``ContentsManager``. It stores notebooks and files in PostgreSQL_ and encodes directories as SQL relations. PGContents also provides an example of how to re-use the notebook's tests. .. _NBFormat: https://nbformat.readthedocs.io/en/latest/index.html .. _PGContents: https://github.com/quantopian/pgcontents .. _PostgreSQL: https://www.postgresql.org/ Asynchronous Support -------------------- An asynchronous version of the Contents API is available to run slow IO processes concurrently. - :class:`~manager.AsyncContentsManager` - :class:`~filemanager.AsyncFileContentsManager` - :class:`~largefilemanager.AsyncLargeFileManager` - :class:`~checkpoints.AsyncCheckpoints` - :class:`~checkpoints.AsyncGenericCheckpointsMixin` .. note:: .. _asynccontents: In most cases, the non-asynchronous Contents API is performant for local filesystems. However, if the Jupyter Notebook web application is interacting with a high-latency virtual filesystem, you may see performance gains by using the asynchronous version. For example, if you're experiencing terminal lag in the web application due to slow and blocking file operations, the asynchronous version can reduce the lag. Before opting in, comparing the performance of both the non-async and async options is recommended. jupyter_server-1.13.1/docs/source/developers/dependency.rst000066400000000000000000000014731415445537200241540ustar00rootroot00000000000000Depending on Jupyter Server =========================== If your project depends directly on Jupyter Server, be sure to watch Jupyter Server's ChangeLog and pin your project to a version that works for your application. Major releases represent possible backwards-compatibility breaking API changes or features. When a new major version is released on PyPI, a branch for that version will be created in this repository, and the version of the master branch will be bumped to the next major version number. That way, the master branch always reflects the latest unreleased version. To install the latest patch of a given version: .. code-block:: console > pip install jupyter_server --upgrade To pin your jupyter_server install to a specific version: .. code-block:: console > pip install jupyter_server==1.0.0
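If your project is itself a package, the same pinning advice can be expressed in its metadata. A minimal sketch of a ``setup.py``, where the project name and version bounds are illustrative (pick the range you have actually tested against):

.. code-block:: python

    # setup.py (sketch; the bounds below are examples, not a recommendation)
    from setuptools import setup

    setup(
        name="myproject",
        install_requires=[
            # Stay within one major version to avoid breaking API changes.
            "jupyter_server>=1.0,<2",
        ],
    )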
jupyter_server-1.13.1/docs/source/developers/extensions.rst000066400000000000000000000471671415445537200242470ustar00rootroot00000000000000================= Server Extensions ================= A Jupyter Server extension is typically a module or package that extends the Server’s REST API/endpoints—i.e. adds extra request handlers to the Server’s Tornado Web Application. You can check some simple examples in the `examples folder `_ in the GitHub jupyter_server repository. Authoring a basic server extension ================================== The simplest way to write a Jupyter Server extension is to write an extension module with a ``_load_jupyter_server_extension`` function. This function should take a single argument, an instance of the ``ServerApp``. .. code-block:: python def _load_jupyter_server_extension(serverapp: jupyter_server.serverapp.ServerApp): """ This function is called when the extension is loaded. """ pass Adding extension endpoints -------------------------- The easiest way to add endpoints and handle incoming requests is to subclass the ``JupyterHandler`` (which itself is a subclass of Tornado's ``RequestHandler``). .. code-block:: python from jupyter_server.base.handlers import JupyterHandler import tornado class MyExtensionHandler(JupyterHandler): @tornado.web.authenticated def get(self): ... @tornado.web.authenticated def post(self): ... .. note:: It is best practice to wrap each handler method with the ``authenticated`` decorator to ensure that each request is authenticated by the server. Then add this handler to Jupyter Server's Web Application through the ``_load_jupyter_server_extension`` function. .. code-block:: python def _load_jupyter_server_extension(serverapp: jupyter_server.serverapp.ServerApp): """ This function is called when the extension is loaded. """ handlers = [ ('/myextension/hello', MyExtensionHandler) ] serverapp.web_app.add_handlers('.*$', handlers)
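If the server is configured with a non-default ``base_url``, hard-coded URL patterns will not match. A common defensive pattern (a sketch, not something the API requires) joins routes against the server's ``base_url`` using ``url_path_join`` from :mod:`jupyter_server.utils`:

.. code-block:: python

    from jupyter_server.utils import url_path_join


    def _load_jupyter_server_extension(serverapp):
        base_url = serverapp.web_app.settings["base_url"]
        handlers = [
            # Resolves to '/myextension/hello' when base_url is '/'.
            (url_path_join(base_url, "myextension", "hello"), MyExtensionHandler),
        ]
        serverapp.web_app.add_handlers(".*$", handlers)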
Making an extension discoverable -------------------------------- To make this extension discoverable to Jupyter Server, first define a ``_jupyter_server_extension_points()`` function at the root of the module/package. This function returns metadata describing how to load the extension. Usually, this requires a ``module`` key with the import path to the extension's ``_load_jupyter_server_extension`` function. .. code-block:: python def _jupyter_server_extension_points(): """ Returns a list of dictionaries with metadata describing where to find the `_load_jupyter_server_extension` function. """ return [ { "module": "my_extension" } ] Second, add the extension to the ServerApp's ``jpserver_extensions`` trait. This can be manually added by users in their ``jupyter_server_config.py`` file, .. code-block:: python c.ServerApp.jpserver_extensions = { "my_extension": True } or loaded from a JSON file in the ``jupyter_server_config.d`` directory under one of `Jupyter's paths`_. (See the `Distributing a server extension`_ section for details on how to automatically enable your extension when users install it.) .. code-block:: json { "ServerApp": { "jpserver_extensions": { "my_extension": true } } } Authoring a configurable extension application ============================================== Some extensions are full-fledged client applications that sit on top of the Jupyter Server. For example, `JupyterLab `_ is a server extension. It can be launched from the command line, configured by CLI or config files, and serves and loads static assets behind the server (i.e. html templates, Javascript, etc.) Jupyter Server offers a convenient base class, ``ExtensionApp``, that handles most of the boilerplate code for building such extensions. Anatomy of an ``ExtensionApp`` ------------------------------ An ExtensionApp: - has traits. - is configurable (from file or CLI) - has a name (see the ``name`` trait). - has an entrypoint, ``jupyter ``. - can serve static content from the ``/static//`` endpoint. - can add new endpoints to the Jupyter Server. The basic structure of an ExtensionApp is shown below: .. code-block:: python from jupyter_server.extension.application import ExtensionApp class MyExtensionApp(ExtensionApp): # -------------- Required traits -------------- name = "myextension" default_url = "/myextension" load_other_extensions = True file_url_prefix = "/render" # --- ExtensionApp traits you can configure --- static_paths = [...] template_paths = [...] settings = {...} handlers = [...] # ----------- add custom traits below --------- ... def initialize_settings(self): ... # Update the self.settings trait to pass extra # settings to the underlying Tornado Web Application. self.settings.update({'':...}) def initialize_handlers(self): ... # Extend the self.handlers trait self.handlers.extend(...) def initialize_templates(self): ... # Change the jinja templating environment async def stop_extension(self): ... # Perform any required shut down steps The ``ExtensionApp`` uses the following methods and properties to connect your extension to the Jupyter server. You do not need to define a ``_load_jupyter_server_extension`` function for these apps. Instead, overwrite the pieces below to add your custom settings, handlers and templates: Methods * ``initialize_settings()``: adds custom settings to the Tornado Web Application. * ``initialize_handlers()``: appends handlers to the Tornado Web Application. * ``initialize_templates()``: initializes the templating engine (e.g. jinja2) for your frontend. * ``stop_extension()``: called on server shut down. Properties * ``name``: the name of the extension * ``default_url``: the default URL for this extension—i.e. the landing page for this extension when launched from the CLI. * ``load_other_extensions``: a boolean enabling/disabling other extensions when launching this extension directly. * ``file_url_prefix``: the prefix URL added when opening a document directly from the command line. For example, classic Notebook uses ``/notebooks`` to open a document at http://localhost:8888/notebooks/path/to/notebook.ipynb. ``ExtensionApp`` request handlers --------------------------------- ``ExtensionApp`` Request Handlers have a few extra properties. * ``config``: the ExtensionApp's config object. * ``server_config``: the ServerApp's config object. * ``name``: the name of the extension to which this handler is linked. * ``static_url()``: a method that returns the url to static files (prefixed with ``/static/``). Jupyter Server provides a convenient mixin class for adding these properties to any ``JupyterHandler``. For example, the basic server extension handler in the section above becomes: .. code-block:: python from jupyter_server.base.handlers import JupyterHandler from jupyter_server.extension.handler import ExtensionHandlerMixin import tornado class MyExtensionHandler(ExtensionHandlerMixin, JupyterHandler): @tornado.web.authenticated def get(self): ... @tornado.web.authenticated def post(self): ...
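For instance, a handler body might use those extras like this. A minimal sketch, assuming the extension ships a static file named ``logo.png`` and has its static paths configured:

.. code-block:: python

    from jupyter_server.base.handlers import JupyterHandler
    from jupyter_server.extension.handler import ExtensionHandlerMixin
    import tornado


    class MyExtensionInfoHandler(ExtensionHandlerMixin, JupyterHandler):

        @tornado.web.authenticated
        def get(self):
            # `name` and `static_url` are supplied by ExtensionHandlerMixin;
            # Tornado serializes the dict to a JSON response.
            self.finish({
                "extension": self.name,
                "logo": self.static_url("logo.png"),
            })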
Jinja templating from frontend extensions ----------------------------------------- Many Jupyter frontend applications use Jinja for basic HTML templating. Since this is common enough, Jupyter Server provides some extra mixins that integrate Jinja with Jupyter server extensions. Use ``ExtensionAppJinjaMixin`` to automatically add a Jinja templating environment to an ``ExtensionApp``. This adds a ``_jinja2_env`` setting to Tornado Web Server's settings that will be used by request handlers. .. code-block:: python from jupyter_server.extension.application import ExtensionApp, ExtensionAppJinjaMixin class MyExtensionApp(ExtensionAppJinjaMixin, ExtensionApp): ... Pair the example above with ``ExtensionHandlers`` that also inherit the ``ExtensionHandlerJinjaMixin`` mixin. This will automatically load HTML templates from the Jinja templating environment created by the ``ExtensionApp``. .. code-block:: python from jupyter_server.base.handlers import JupyterHandler from jupyter_server.extension.handler import ( ExtensionHandlerMixin, ExtensionHandlerJinjaMixin ) import tornado class MyExtensionHandler( ExtensionHandlerMixin, ExtensionHandlerJinjaMixin, JupyterHandler ): @tornado.web.authenticated def get(self): ... @tornado.web.authenticated def post(self): ... .. note:: The mixin classes in this example must come before the base classes, ``ExtensionApp`` and ``ExtensionHandler``. Making an ``ExtensionApp`` discoverable --------------------------------------- To make an ``ExtensionApp`` discoverable by Jupyter Server, add the ``app`` key-value pair to the ``_jupyter_server_extension_points()`` function example above: .. code-block:: python from myextension import MyExtensionApp def _jupyter_server_extension_points(): """ Returns a list of dictionaries with metadata describing where to find the `_load_jupyter_server_extension` function. """ return [ { "module": "myextension", "app": MyExtensionApp } ] Launching an ``ExtensionApp`` ----------------------------- To launch the application, simply call the ``ExtensionApp``'s ``launch_instance`` method. .. code-block:: python launch_instance = MyExtensionApp.launch_instance launch_instance() To make your extension executable from anywhere on your system, point an entry-point at the ``launch_instance`` method in the extension's ``setup.py``: .. code-block:: python from setuptools import setup setup( name='myextension', ... entry_points={ 'console_scripts': [ 'jupyter-myextension = myextension:launch_instance' ] } ) ``ExtensionApp`` as a classic Notebook server extension ------------------------------------------------------- An extension that extends ``ExtensionApp`` should still work with the old Tornado server from the classic Jupyter Notebook. The ``ExtensionApp`` class provides a method, ``load_classic_server_extension``, that handles the extension initialization. Simply define a ``load_jupyter_server_extension`` reference pointing at the ``load_classic_server_extension`` method: .. code-block:: python # This is typically defined in the root `__init__.py` # file of the extension package. load_jupyter_server_extension = MyExtensionApp.load_classic_server_extension If the extension is enabled, the extension will be loaded when the server starts. Distributing a server extension =============================== Putting it all together, authors can distribute their extension following these steps: 1. Add a ``_jupyter_server_extension_points()`` function at the extension's root.
This function should likely live in the ``__init__.py`` found at the root of the extension package. It will look something like this: .. code-block:: python # Found in the __init__.py of package def _jupyter_server_extension_points(): return [ { "module": "myextension.app", "app": MyExtensionApp } ] 2. Create an extension by writing a ``_load_jupyter_server_extension()`` function or subclassing ``ExtensionApp``. This is where the extension logic will live (i.e. custom extension handlers, config, etc.). See the sections above for more information on how to create an extension. 3. Add the following JSON config file to the extension package. The file should be named after the extension (e.g. ``myextension.json``) and saved in a subdirectory of the package with the prefix: ``jupyter-config/jupyter_server_config.d/``. The extension package will have a similar structure to this example: .. code-block:: myextension ├── myextension/ │ ├── __init__.py │ └── app.py ├── jupyter-config/ │ └── jupyter_server_config.d/ │ └── myextension.json └── setup.py The contents of the JSON file will tell Jupyter Server to load the extension when a user installs the package: .. code-block:: json { "ServerApp": { "jpserver_extensions": { "myextension": true } } } When the extension is installed, this JSON file will be copied to the ``jupyter_server_config.d`` directory found in one of `Jupyter's paths`_. Users can toggle the enabling/disabling of an extension using the command: .. code-block:: console jupyter server extension disable myextension which will change the boolean value in the JSON file above. 4. Create a ``setup.py`` that automatically enables the extension. Add a few extra lines to the extension package's ``setup`` function: .. code-block:: python from setuptools import setup setup( name="myextension", ... include_package_data=True, data_files=[ ( "etc/jupyter/jupyter_server_config.d", ["jupyter-config/jupyter_server_config.d/myextension.json"] ), ] ) .. links .. _`Jupyter's paths`: https://jupyter.readthedocs.io/en/latest/use/jupyter-directories.html Migrating an extension to use Jupyter Server ============================================ If you're a developer of a `classic Notebook Server`_ extension, your extension should be able to work with *both* the classic notebook server and ``jupyter_server``. There are a few key steps to make this happen: 1. Point Jupyter Server to the ``load_jupyter_server_extension`` function with a new reference name. The ``load_jupyter_server_extension`` function was the key to loading a server extension in the classic Notebook Server. Jupyter Server expects the name of this function to be prefixed with an underscore—i.e. ``_load_jupyter_server_extension``. You can easily achieve this by adding a reference to the old function name with the new name in the same module. .. code-block:: python def load_jupyter_server_extension(nb_server_app): ... # Reference the old function name with the new function name. _load_jupyter_server_extension = load_jupyter_server_extension 2. Add new data files to your extension package that enable it with Jupyter Server. This new file can go next to your classic notebook server data files. Create a new sub-directory, ``jupyter_server_config.d``, and add a new ``.json`` file there: .. raw:: html
        <pre>
        myextension
        ├── myextension/
        │   ├── __init__.py
        │   └── app.py
        ├── jupyter-config/
        │   ├── jupyter_notebook_config.d/
        │   │   └── myextension.json
        │   └── jupyter_server_config.d/
        │       └── myextension.json
        └── setup.py
        </pre>
The new ``.json`` file should look something like this (you'll notice the changes in the configured class and trait names): .. code-block:: json { "ServerApp": { "jpserver_extensions": { "myextension": true } } } Update your extension package's ``setup.py`` so that the data-files are moved into the jupyter configuration directories when users download the package. .. code-block:: python from setuptools import setup setup( name="myextension", ... include_package_data=True, data_files=[ ( "etc/jupyter/jupyter_server_config.d", ["jupyter-config/jupyter_server_config.d/myextension.json"] ), ( "etc/jupyter/jupyter_notebook_config.d", ["jupyter-config/jupyter_notebook_config.d/myextension.json"] ), ] ) 3. (Optional) Point extension at the new favicon location. The favicons in the Jupyter Notebook have been moved to a new location in Jupyter Server. If your extension is using one of these icons, you'll want to add a set of redirect handlers for this. (In ``ExtensionApp``, this is handled automatically.) This usually means adding a chunk to your ``load_jupyter_server_extension`` function similar to this: .. code-block:: python from tornado.web import RedirectHandler from jupyter_server.utils import url_path_join def load_jupyter_server_extension(nb_server_app): web_app = nb_server_app.web_app host_pattern = '.*$' base_url = web_app.settings['base_url'] # Add custom extensions handler. custom_handlers = [ ... ] # Favicon redirects. favicon_redirects = [ ( url_path_join(base_url, "/static/favicons/favicon.ico"), RedirectHandler, {"url": url_path_join(base_url, "static/base/images/favicon.ico")} ), ( url_path_join(base_url, "/static/favicons/favicon-busy-1.ico"), RedirectHandler, {"url": url_path_join(base_url, "static/base/images/favicon-busy-1.ico")} ), ( url_path_join(base_url, "/static/favicons/favicon-busy-2.ico"), RedirectHandler, {"url": url_path_join(base_url, "static/base/images/favicon-busy-2.ico")} ), ( url_path_join(base_url, "/static/favicons/favicon-busy-3.ico"), RedirectHandler, {"url": url_path_join(base_url, "static/base/images/favicon-busy-3.ico")} ), ( url_path_join(base_url, "/static/favicons/favicon-file.ico"), RedirectHandler, {"url": url_path_join(base_url, "static/base/images/favicon-file.ico")} ), ( url_path_join(base_url, "/static/favicons/favicon-notebook.ico"), RedirectHandler, {"url": url_path_join(base_url, "static/base/images/favicon-notebook.ico")} ), ( url_path_join(base_url, "/static/favicons/favicon-terminal.ico"), RedirectHandler, {"url": url_path_join(base_url, "static/base/images/favicon-terminal.ico")} ), ( url_path_join(base_url, "/static/logo/logo.png"), RedirectHandler, {"url": url_path_join(base_url, "static/base/images/logo.png")} ), ] web_app.add_handlers( host_pattern, custom_handlers + favicon_redirects ) .. _`classic Notebook Server`: https://jupyter-notebook.readthedocs.io/en/stable/extending/handlers.html jupyter_server-1.13.1/docs/source/developers/index.rst000066400000000000000000000005631415445537200231440ustar00rootroot00000000000000Documentation for Developers ---------------------------- These pages target people writing Jupyter Web applications and server extensions, or people who need to dive deeper into Jupyter Server's REST API and configuration system. ..
toctree:: :caption: Developers :maxdepth: 1 :name: developers dependency rest-api extensions savehooks contents jupyter_server-1.13.1/docs/source/developers/rest-api.rst000066400000000000000000000004141415445537200235540ustar00rootroot00000000000000The REST API ============ An interactive version is available `here `_. .. openapi:: ../../../jupyter_server/services/api/api.yaml jupyter_server-1.13.1/docs/source/developers/savehooks.rst000066400000000000000000000052111415445537200240320ustar00rootroot00000000000000File save hooks =============== You can configure functions that are run whenever a file is saved. There are two hooks available: * ``ContentsManager.pre_save_hook`` runs on the API path and model with content. This can be used for things like stripping output, so that it doesn't add noise to version control. * ``FileContentsManager.post_save_hook`` runs on the filesystem path and model without content. This could be used to commit changes after every save, for instance. They are both called with keyword arguments:: pre_save_hook(model=model, path=path, contents_manager=cm) post_save_hook(model=model, os_path=os_path, contents_manager=cm) Examples -------- These can both be added to :file:`jupyter_server_config.py`. A pre-save hook for stripping output:: def scrub_output_pre_save(model, **kwargs): """scrub output before saving notebooks""" # only run on notebooks if model['type'] != 'notebook': return # only run on nbformat v4 if model['content']['nbformat'] != 4: return for cell in model['content']['cells']: if cell['cell_type'] != 'code': continue cell['outputs'] = [] cell['execution_count'] = None c.FileContentsManager.pre_save_hook = scrub_output_pre_save A post-save hook to make a script equivalent whenever the notebook is saved (replacing the ``--script`` option in older versions of the notebook): .. code-block:: python import io import os from jupyter_server.utils import to_api_path _script_exporter = None def script_post_save(model, os_path, contents_manager, **kwargs): """convert notebooks to Python script after save with nbconvert replaces `ipython notebook --script` """ from nbconvert.exporters.script import ScriptExporter if model['type'] != 'notebook': return global _script_exporter if _script_exporter is None: _script_exporter = ScriptExporter(parent=contents_manager) log = contents_manager.log base, ext = os.path.splitext(os_path) py_fname = base + '.py' script, resources = _script_exporter.from_filename(os_path) script_fname = base + resources.get('output_extension', '.txt') log.info("Saving script /%s", to_api_path(script_fname, contents_manager.root_dir)) with io.open(script_fname, 'w', encoding='utf-8') as f: f.write(script) c.FileContentsManager.post_save_hook = script_post_save This could be a simple call to ``jupyter nbconvert --to script``, but spawning the subprocess every time is quite slow. jupyter_server-1.13.1/docs/source/index.rst000066400000000000000000000037601415445537200207760ustar00rootroot00000000000000Welcome! ======== You've landed on the documentation pages for the **Jupyter Server** Project. Some other pages you may have been looking for: * `Jupyter Server Github Repo `_, the source code we describe in these docs. * `Jupyter Notebook Github Repo `_ , the source code for the classic Notebook. * `JupyterLab Github Repo `_, the JupyterLab server which runs on the Jupyter Server. Introduction ------------ Jupyter Server is the backend—the core services, APIs, and `REST endpoints`_—to Jupyter web applications. ..
note:: Jupyter Server is a replacement for the Tornado Web Server in `Jupyter Notebook`_. Jupyter web applications should move to using Jupyter Server. For help, see the :ref:`migrate_from_notebook` page. .. _Tornado: https://www.tornadoweb.org/en/stable/ .. _Jupyter Notebook: https://github.com/jupyter/notebook .. _REST endpoints: https://petstore.swagger.io/?url=https://raw.githubusercontent.com/jupyter/jupyter_server/master/jupyter_server/services/api/api.yaml Who's this for? --------------- The Jupyter Server is a highly technical piece of the Jupyter Stack, so we've separated documentation to help specific personas: 1. :ref:`Users `: people using Jupyter web applications. 2. :ref:`Operators `: people deploying or serving Jupyter web applications to others. 3. :ref:`Developers `: people writing Jupyter Server extensions and web applications. 4. :ref:`Contributors `: people contributing directly to the Jupyter Server library. If you find gaps in our documentation, please open an issue (or better, a pull request) on the Jupyter Server `Github repo `_. Table of Contents ----------------- .. toctree:: :maxdepth: 2 Users Operators Developers Contributors Other jupyter_server-1.13.1/docs/source/operators/000077500000000000000000000000001415445537200211455ustar00rootroot00000000000000jupyter_server-1.13.1/docs/source/operators/configuring-extensions.rst000066400000000000000000000041431415445537200264100ustar00rootroot00000000000000.. _configure-multiple-extensions: Configuring Extensions ====================== Some Jupyter Server extensions are also configurable applications. There are two ways to configure such extensions: i) pass arguments to the extension's entry point or ii) list configurable options in a Jupyter config file. Jupyter Server looks for an extension's config file in a set of specific paths. Use the ``jupyter`` entry point to list these paths: .. code-block:: console > jupyter --paths config: /Users/username/.jupyter /usr/local/etc/jupyter /etc/jupyter data: /Users/username/Library/Jupyter /usr/local/share/jupyter /usr/share/jupyter runtime: /Users/username/Library/Jupyter/runtime Extension config from file -------------------------- Jupyter Server expects the file to be named after the extension's name like so: ``jupyter_{name}_config``. For example, the Jupyter Notebook's config file is ``jupyter_notebook_config``. Configuration files can be Python or JSON files. In Python config files, each trait is prefixed with ``c.``, which links the trait to the config loader. For example, Jupyter Notebook config might look like: .. code-block:: python # jupyter_notebook_config.py c.NotebookApp.mathjax_enabled = False A Jupyter Server will automatically load config for each enabled extension. You can configure each extension by creating its corresponding Jupyter config file. Extension config on the command line ------------------------------------ Server extension applications can also be configured from the command line, and multiple extensions can be configured at the same time. Simply pass the traits (with their appropriate prefix) to the ``jupyter server`` entrypoint, e.g.: .. code-block:: console > jupyter server --ServerApp.port=9999 --MyExtension1.trait=False --MyExtension2.trait=True This will also work with any extension entrypoints that allow other extensions to run side-by-side, e.g.: ..
code-block:: console > jupyter myextension --ServerApp.port=9999 --MyExtension1.trait=False --MyExtension2.trait=True jupyter_server-1.13.1/docs/source/operators/index.rst000066400000000000000000000005501415445537200230060ustar00rootroot00000000000000Documentation for Operators =========================== These pages are targeted at people using, configuring, and/or deploying multiple Jupyter Web Application with Jupyter Server. .. toctree:: :caption: Operators :maxdepth: 1 :name: operators multiple-extensions configuring-extensions migrate-from-nbserver public-server security jupyter_server-1.13.1/docs/source/operators/ipython_security.asc000066400000000000000000000060611415445537200252610ustar00rootroot00000000000000-----BEGIN PGP PUBLIC KEY BLOCK----- Version: GnuPG v2.0.22 (GNU/Linux) mQINBFMx2LoBEAC9xU8JiKI1VlCJ4PT9zqhU5nChQZ06/bj1BBftiMJG07fdGVO0 ibOn4TrCoRYaeRlet0UpHzxT4zDa5h3/usJaJNTSRwtWePw2o7Lik8J+F3LionRf 8Jz81WpJ+81Klg4UWKErXjBHsu/50aoQm6ZNYG4S2nwOmMVEC4nc44IAA0bb+6kW saFKKzEDsASGyuvyutdyUHiCfvvh5GOC2h9mXYvl4FaMW7K+d2UgCYERcXDNy7C1 Bw+uepQ9ELKdG4ZpvonO6BNr1BWLln3wk93AQfD5qhfsYRJIyj0hJlaRLtBU3i6c xs+gQNF4mPmybpPSGuOyUr4FYC7NfoG7IUMLj+DYa6d8LcMJO+9px4IbdhQvzGtC qz5av1TX7/+gnS4L8C9i1g8xgI+MtvogngPmPY4repOlK6y3l/WtxUPkGkyYkn3s RzYyE/GJgTwuxFXzMQs91s+/iELFQq/QwmEJf+g/QYfSAuM+lVGajEDNBYVAQkxf gau4s8Gm0GzTZmINilk+7TxpXtKbFc/Yr4A/fMIHmaQ7KmJB84zKwONsQdVv7Jjj 0dpwu8EIQdHxX3k7/Q+KKubEivgoSkVwuoQTG15X9xrOsDZNwfOVQh+JKazPvJtd SNfep96r9t/8gnXv9JI95CGCQ8lNhXBUSBM3BDPTbudc4b6lFUyMXN0mKQARAQAB tCxJUHl0aG9uIFNlY3VyaXR5IFRlYW0gPHNlY3VyaXR5QGlweXRob24ub3JnPokC OAQTAQIAIgUCUzHYugIbAwYLCQgHAwIGFQgCCQoLBBYCAwECHgECF4AACgkQEwJc LcmZYkjuXg//R/t6nMNQmf9W1h52IVfUbRAVmvZ5d063hQHKV2dssxtnA2dRm/x5 JZu8Wz7ZrEZpyqwRJO14sxN1/lC3v+zs9XzYXr2lBTZuKCPIBypYVGIynCuWJBQJ rWnfG4+u1RHahnjqlTWTY1C/le6v7SjAvCb6GbdA6k4ZL2EJjQlRaHDmzw3rV/+l LLx6/tYzIsotuflm/bFumyOMmpQQpJjnCkWIVjnRICZvuAn97jLgtTI0+0Rzf4Zb k2BwmHwDRqWCTTcRI9QvTl8AzjW+dNImN22TpGOBPfYj8BCZ9twrpKUbf+jNqJ1K THQzFtpdJ6SzqiFVm74xW4TKqCLkbCQ/HtVjTGMGGz/y7KTtaLpGutQ6XE8SSy6P EffSb5u+kKlQOWaH7Mc3B0yAojz6T3j5RSI8ts6pFi6pZhDg9hBfPK2dT0v/7Mkv E1Z7q2IdjZnhhtGWjDAMtDDn2NbY2wuGoa5jAWAR0WvIbEZ3kOxuLE5/ZOG1FyYm noJRliBz7038nT92EoD5g1pdzuxgXtGCpYyyjRZwaLmmi4CvA+oThKmnqWNY5lyY ricdNHDiyEXK0YafJL1oZgM86MSb0jKJMp5U11nUkUGzkroFfpGDmzBwAzEPgeiF 40+qgsKB9lqwb3G7PxvfSi3XwxfXgpm1cTyEaPSzsVzve3d1xeqb7Yq5Ag0EUzHY ugEQALQ5FtLdNoxTxMsgvrRr1ejLiUeRNUfXtN1TYttOfvAhfBVnszjtkpIW8DCB JF/bA7ETiH8OYYn/Fm6MPI5H64IHEncpzxjf57jgpXd9CA9U2OMk/P1nve5zYchP QmP2fJxeAWr0aRH0Mse5JS5nCkh8Xv4nAjsBYeLTJEVOb1gPQFXOiFcVp3gaKAzX GWOZ/mtG/uaNsabH/3TkcQQEgJefd11DWgMB7575GU+eME7c6hn3FPITA5TC5HUX azvjv/PsWGTTVAJluJ3fUDvhpbGwYOh1uV0rB68lPpqVIro18IIJhNDnccM/xqko 4fpJdokdg4L1wih+B04OEXnwgjWG8OIphR/oL/+M37VV2U7Om/GE6LGefaYccC9c tIaacRQJmZpG/8RsimFIY2wJ07z8xYBITmhMmOt0bLBv0mU0ym5KH9Dnru1m9QDO AHwcKrDgL85f9MCn+YYw0d1lYxjOXjf+moaeW3izXCJ5brM+MqVtixY6aos3YO29 J7SzQ4aEDv3h/oKdDfZny21jcVPQxGDui8sqaZCi8usCcyqWsKvFHcr6vkwaufcm 3Knr2HKVotOUF5CDZybopIz1sJvY/5Dx9yfRmtivJtglrxoDKsLi1rQTlEQcFhCS ACjf7txLtv03vWHxmp4YKQFkkOlbyhIcvfPVLTvqGerdT2FHABEBAAGJAh8EGAEC AAkFAlMx2LoCGwwACgkQEwJcLcmZYkgK0BAAny0YUugpZldiHzYNf8I6p2OpiDWv ZHaguTTPg2LJSKaTd+5UHZwRFIWjcSiFu+qTGLNtZAdcr0D5f991CPvyDSLYgOwb Jm2p3GM2KxfECWzFbB/n/PjbZ5iky3+5sPlOdBR4TkfG4fcu5GwUgCkVe5u3USAk C6W5lpeaspDz39HAPRSIOFEX70+xV+6FZ17B7nixFGN+giTpGYOEdGFxtUNmHmf+ waJoPECyImDwJvmlMTeP9jfahlB6Pzaxt6TBZYHetI/JR9FU69EmA+XfCSGt5S+0 Eoc330gpsSzo2VlxwRCVNrcuKmG7PsFFANok05ssFq1/Djv5rJ++3lYb88b8HSP2 3pQJPrM7cQNU8iPku9yLXkY5qsoZOH+3yAia554Dgc8WBhp6fWh58R0dIONQxbbo 
apNdwvlI8hKFB7TiUL6PNShE1yL+XD201iNkGAJXbLMIC1ImGLirUfU267A3Cop5 hoGs179HGBcyj/sKA3uUIFdNtP+NndaP3v4iYhCitdVCvBJMm6K3tW88qkyRGzOk 4PW422oyWKwbAPeMk5PubvEFuFAIoBAFn1zecrcOg85RzRnEeXaiemmmH8GOe1Xu Kh+7h8XXyG6RPFy8tCcLOTk+miTqX+4VWy+kVqoS2cQ5IV8WsJ3S7aeIy0H89Z8n 5vmLc+Ibz+eT+rM= =XVDe -----END PGP PUBLIC KEY BLOCK----- jupyter_server-1.13.1/docs/source/operators/migrate-from-nbserver.rst000066400000000000000000000030221415445537200261120ustar00rootroot00000000000000.. _migrate_from_notebook: Migrating from Notebook Server ============================== To migrate from notebook server to plain jupyter server, follow these steps: - Rename your ``jupyter_notebook_config.py`` file to ``jupyter_server_config.py``. - Rename all ``c.NotebookApp`` traits to ``c.ServerApp``. For example, if you have the following ``jupyter_notebook_config.py``: .. code-block:: python c.NotebookApp.allow_credentials = False c.NotebookApp.port = 8889 c.NotebookApp.password_required = True you will have to create the following ``jupyter_server_config.py`` file: .. code-block:: python c.ServerApp.allow_credentials = False c.ServerApp.port = 8889 c.ServerApp.password_required = True Running Jupyter Notebook on Jupyter Server ========================================== If you want to switch to Jupyter Server, but you still want to serve `Jupyter Notebook `_ to users, you can try `NBClassic `_. NBClassic is a Jupyter Server extension that serves the Notebook frontend (i.e. all static assets) on top of Jupyter Server. It even loads Jupyter Notebook's config files. .. warning:: NBClassic will only work for a limited time. Jupyter Server is likely to evolve to a point where the Jupyter Notebook frontend no longer works with the underlying server. Consider switching to `JupyterLab `_ or `nteract `_ where there is active development happening. jupyter_server-1.13.1/docs/source/operators/multiple-extensions.rst000066400000000000000000000066501415445537200257360ustar00rootroot00000000000000 .. _managing-multiple-extensions: Managing multiple extensions ---------------------------- One of the major benefits of Jupyter Server is that you can serve multiple Jupyter frontend applications on top of the same Tornado web server. That's because every Jupyter frontend application is now a server extension. When you run a Jupyter Server with multiple extensions enabled, each extension appends its own set of handlers and static assets to the server. Listing extensions ~~~~~~~~~~~~~~~~~~ When you install a Jupyter Server extension, it *should* automatically add itself to your list of enabled extensions. You can see a list of installed extensions by calling: .. code-block:: console > jupyter server extension list config dir: /Users/username/etc/jupyter myextension enabled - Validating myextension... myextension OK Enabling/disabling extensions ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ You enable/disable an extension using the following commands: .. code-block:: console > jupyter server extension enable myextension Enabling: myextension - Validating myextension... myextension OK - Extension successfully enabled. > jupyter server extension disable myextension Disabling: jupyter_home - Validating jupyter_home... jupyter_home OK - Extension successfully disabled. Running an extension from its entrypoint ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Extensions that are also Jupyter applications (i.e. Notebook, JupyterLab, Voila, etc.) can be launched from a CLI entrypoint. For example, launch Jupyter Notebook using: ..
code-block:: console > jupyter notebook Jupyter Server will automatically start a server and the browser will be routed to Jupyter Notebook's default URL (typically, ``/tree``). Other enabled extensions will still be available to the user. The entrypoint simply offers a more direct (backwards compatible) launching mechanism. Launching a server with multiple extensions ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If multiple extensions are enabled, a Jupyter Server can be launched directly: .. code-block:: console > jupyter server [I 2020-03-23 15:44:53.290 ServerApp] Serving notebooks from local directory: /Users/username/path [I 2020-03-23 15:44:53.290 ServerApp] Jupyter Server 0.3.0.dev is running at: [I 2020-03-23 15:44:53.290 ServerApp] http://localhost:8888/?token=<...> [I 2020-03-23 15:44:53.290 ServerApp] or http://127.0.0.1:8888/?token=<...> [I 2020-03-23 15:44:53.290 ServerApp] Use Control-C to stop this server and shut down all kernels (twice to skip confirmation). [I 2020-03-23 15:44:53.290 ServerApp] Welcome to Project Jupyter! Explore the various tools available and their corresponding documentation. If you are interested in contributing to the platform, please visit the community resources section at https://jupyter.org/community.html. [C 2020-03-23 15:44:53.296 ServerApp] To access the server, open this file in a browser: file:///Users/username/path/jpserver-####-open.html Or copy and paste one of these URLs: http://localhost:8888/?token=<...> or http://127.0.0.1:8888/?token=<...> Extensions can also be enabled manually from the Jupyter Server entrypoint using the ``jpserver_extensions`` trait: .. code-block:: console > jupyter server --ServerApp.jpserver_extensions='{"myextension":{"enabled": True}}'
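The same trait can be set from a config file instead of the command line. A sketch of :file:`jupyter_server_config.py` enabling two extensions side by side, where both extension names are placeholders:

.. code-block:: python

    # jupyter_server_config.py (sketch; extension names are illustrative)
    c.ServerApp.jpserver_extensions = {
        "myextension": True,
        "anotherextension": True,
    }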
jupyter_server-1.13.1/docs/source/operators/public-server.rst000066400000000000000000000432351415445537200244700ustar00rootroot00000000000000.. _working_remotely: Running a public Jupyter Server =============================== The Jupyter Server uses a :ref:`two-process kernel architecture ` based on ZeroMQ_, as well as Tornado_ for serving HTTP requests. .. note:: By default, Jupyter Server runs locally at 127.0.0.1:8888 and is accessible only from `localhost`. You may access the server from the browser using `http://127.0.0.1:8888`. This document describes how you can :ref:`secure a Jupyter server <Jupyter_server_security>` and how to :ref:`run it on a public interface <jupyter_public_server>`. .. important:: **This is not the multi-user server you are looking for**. This document describes how you can run a public server with a single user. This should only be done by someone who wants remote access to their personal machine. Even so, doing this requires a thorough understanding of the set-up's limitations and security implications. If you allow multiple users to access a Jupyter server as it is described in this document, their commands may collide, clobber and overwrite each other. If you want a multi-user server, the official solution is JupyterHub_. To use JupyterHub, you need a Unix server (typically Linux) running somewhere that is accessible to your users on a network. This may run over the public internet, but doing so introduces additional `security concerns `_. .. _ZeroMQ: https://zeromq.org/ .. _Tornado: https://www.tornadoweb.org/en/stable/ .. _JupyterHub: https://jupyterhub.readthedocs.io/en/latest/ .. _Jupyter_server_security: Securing a Jupyter server ------------------------- You can protect your Jupyter server with a simple single password. As of notebook 5.0 this can be done automatically. To set up a password manually you can configure the :attr:`ServerApp.password` setting in :file:`jupyter_server_config.py`. Prerequisite: A Jupyter server configuration file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Check to see if you have a Jupyter server configuration file, :file:`jupyter_server_config.py`. The default location for this file is your Jupyter folder located in your home directory: - Windows: :file:`C:\\Users\\USERNAME\\.jupyter\\jupyter_server_config.py` - OS X: :file:`/Users/USERNAME/.jupyter/jupyter_server_config.py` - Linux: :file:`/home/USERNAME/.jupyter/jupyter_server_config.py` If you don't already have a Jupyter folder, or if your Jupyter folder doesn't contain a Jupyter server configuration file, run the following command:: $ jupyter server --generate-config This command will create the Jupyter folder if necessary, and create a Jupyter server configuration file, :file:`jupyter_server_config.py`, in this folder. Automatic Password setup ~~~~~~~~~~~~~~~~~~~~~~~~ As of notebook 5.3, the first time you log in using a token, the server should give you the opportunity to set up a password from the user interface. You will be presented with a form asking for the current *token*, as well as your *new password*; enter both and click on ``Login and setup new password``. Next time you need to log in you'll be able to use the new password instead of the login token; otherwise follow the procedure to set a password from the command line. The ability to change the password at first login time may be disabled by integrations by setting ``--ServerApp.allow_password_change=False``. Starting at notebook version 5.0, you can enter and store a password for your server with a single command. :command:`jupyter server password` will prompt you for your password and record the hashed password in your :file:`jupyter_server_config.json`. .. code-block:: bash $ jupyter server password Enter password: **** Verify password: **** [JupyterPasswordApp] Wrote hashed password to /Users/you/.jupyter/jupyter_server_config.json This can be used to reset a lost password, or if you believe your credentials have been leaked and you want to change your password. Changing your password will invalidate all logged-in sessions after a server restart. .. _hashed-pw: Preparing a hashed password ~~~~~~~~~~~~~~~~~~~~~~~~~~~ You can prepare a hashed password manually, using the function :func:`jupyter_server.auth.passwd`: .. code-block:: ipython In [1]: from jupyter_server.auth import passwd In [2]: passwd() Enter password: Verify password: Out[2]: 'sha1:67c9e60bb8b6:9ffede0825894254b2e042ea597d771089e11aed' .. caution:: :func:`~jupyter_server.auth.passwd` when called with no arguments will prompt you to enter and verify your password, as in the code snippet above. Although the function can also be passed a string as an argument, such as ``passwd('mypassword')``, please **do not** pass a string as an argument inside an IPython session, as it will be saved in your input history.
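If you need to generate the hash non-interactively, for example from a provisioning script, a small sketch using :mod:`getpass` keeps the plain-text password out of shell and IPython history:

.. code-block:: python

    from getpass import getpass

    from jupyter_server.auth import passwd

    # Prompt without echoing; the plain-text password is never stored.
    hashed = passwd(getpass())
    print(hashed)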
Adding hashed password to your notebook configuration file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ You can then add the hashed password to your :file:`jupyter_server_config.py`. The default location for this file :file:`jupyter_server_config.py` is in your Jupyter folder in your home directory, ``~/.jupyter``, e.g.:: c.ServerApp.password = u'sha1:67c9e60bb8b6:9ffede0825894254b2e042ea597d771089e11aed' Automatic password setup will store the hash in ``jupyter_server_config.json`` while this method stores the hash in ``jupyter_server_config.py``. The ``.json`` configuration options take precedence over the ``.py`` ones, so the manual password may not take effect if the JSON file has a password set. Using SSL for encrypted communication ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When using a password, it is a good idea to also use SSL with a web certificate, so that your hashed password is not sent unencrypted by your browser. .. important:: Web security is rapidly changing and evolving. We provide this document as a convenience to the user, and recommend that the user keep current on changes that may impact security, such as new releases of OpenSSL. The Open Web Application Security Project (`OWASP`_) website is a good resource on general security issues and web practices. You can start the notebook server in secure protocol mode by setting the ``certfile`` option to your self-signed certificate, e.g. ``mycert.pem``, with the command:: $ jupyter server --certfile=mycert.pem --keyfile mykey.key .. tip:: A self-signed certificate can be generated with ``openssl``. For example, the following command will create a certificate valid for 365 days, with the key and the certificate written to the two files given:: $ openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout mykey.key -out mycert.pem When starting the notebook server, your browser may warn that your self-signed certificate is insecure or unrecognized. If you wish to have a fully compliant self-signed certificate that will not raise warnings, it is possible (but rather involved) to create one, as explained in detail in this `tutorial`_. Alternatively, you may use `Let's Encrypt`_ to acquire a free SSL certificate and follow the steps in :ref:`using-lets-encrypt` to set up a public server. .. _OWASP: https://www.owasp.org/index.php/Main_Page .. _tutorial: https://arstechnica.com/information-technology/2009/12/how-to-get-set-with-a-secure-sertificate-for-free/ .. _jupyter_public_server: Running a public notebook server -------------------------------- If you want to access your notebook server remotely via a web browser, you can do so by running a public notebook server. For optimal security when running a public notebook server, you should first secure the server with a password and SSL/HTTPS as described in :ref:`jupyter_server_security`. Start by creating a certificate file and a hashed password, as explained in :ref:`jupyter_server_security`. If you don't already have one, create a config file for the notebook using the following command line:: $ jupyter server --generate-config In the ``~/.jupyter`` directory, edit the notebook config file, ``jupyter_server_config.py``. By default, the notebook config file has all fields commented out.
The minimum set of configuration options that you should uncomment and edit in :file:`jupyter_server_config.py` is the following:: # Set options for certfile, ip, password, and toggle off # browser auto-opening c.ServerApp.certfile = u'/absolute/path/to/your/certificate/mycert.pem' c.ServerApp.keyfile = u'/absolute/path/to/your/certificate/mykey.key' # Set ip to '*' to bind on all interfaces (ips) for the public server c.ServerApp.ip = '*' c.ServerApp.password = u'sha1:bcd259ccf...' c.ServerApp.open_browser = False # It is a good idea to set a known, fixed port for server access c.ServerApp.port = 9999 You can then start the notebook using the ``jupyter server`` command. .. _using-lets-encrypt: Using Let's Encrypt ~~~~~~~~~~~~~~~~~~~ `Let's Encrypt`_ provides free SSL/TLS certificates. You can also set up a public server using a `Let's Encrypt`_ certificate. :ref:`jupyter_public_server` will be similar when using a Let's Encrypt certificate with a few configuration changes. Here are the steps: 1. Create a `Let's Encrypt certificate `_. 2. Use :ref:`hashed-pw` to create a hashed password. 3. If you don't already have a config file for the notebook, create one using the following command: .. code-block:: bash $ jupyter server --generate-config 4. In the ``~/.jupyter`` directory, edit the notebook config file, ``jupyter_server_config.py``. By default, the notebook config file has all fields commented out. The minimum set of configuration options that you should uncomment and edit in :file:`jupyter_server_config.py` is the following:: # Set options for certfile, ip, password, and toggle off # browser auto-opening c.ServerApp.certfile = u'/absolute/path/to/your/certificate/fullchain.pem' c.ServerApp.keyfile = u'/absolute/path/to/your/certificate/privkey.pem' # Set ip to '*' to bind on all interfaces (ips) for the public server c.ServerApp.ip = '*' c.ServerApp.password = u'sha1:bcd259ccf...' c.ServerApp.open_browser = False # It is a good idea to set a known, fixed port for server access c.ServerApp.port = 9999 You can then start the notebook using the ``jupyter server`` command. .. important:: **Use 'https'.** Keep in mind that when you enable SSL support, you must access the notebook server over ``https://``, not over plain ``http://``. The startup message from the server prints a reminder in the console, but *it is easy to overlook this detail and think the server is for some reason non-responsive*. **When using SSL, always access the notebook server with 'https://'.** You may now access the public server by pointing your browser to ``https://your.host.com:9999`` where ``your.host.com`` is your public server's domain. .. _`Let's Encrypt`: https://letsencrypt.org Firewall Setup ~~~~~~~~~~~~~~ To function correctly, the firewall on the computer running the jupyter notebook server must be configured to allow connections from client machines on the access port ``c.ServerApp.port`` set in :file:`jupyter_server_config.py`, so that clients can reach the web interface. The firewall must also allow connections from 127.0.0.1 (localhost) on ports from 49152 to 65535. These ports are used by the server to communicate with the notebook kernels. The kernel communication ports are chosen randomly by ZeroMQ, and may require multiple connections per kernel, so a large range of ports must be accessible.
Running the notebook with a customized URL prefix
-------------------------------------------------

The notebook dashboard, which is the landing page with an overview of the
notebooks in your working directory, is typically found and accessed at the
default URL ``http://localhost:8888/``.

If you prefer to customize the URL prefix for the notebook dashboard, you can
do so by modifying ``jupyter_server_config.py``. For example, if you prefer
that the notebook dashboard be located under a sub-directory that contains
other ipython files, e.g. ``http://localhost:8888/ipython/``, you can do so
with configuration options like the following (see above for instructions
about modifying ``jupyter_server_config.py``):

.. code-block:: python

    c.ServerApp.base_url = '/ipython/'

Embedding the notebook in another website
-----------------------------------------

Sometimes you may want to embed the notebook somewhere on your website, e.g.
in an IFrame. To do this, you may need to override the Content-Security-Policy
to allow embedding. Assuming your website is at ``https://mywebsite.example.com``,
you can embed the notebook on your website with the following configuration
setting in :file:`jupyter_server_config.py`:

.. code-block:: python

    c.ServerApp.tornado_settings = {
        'headers': {
            'Content-Security-Policy': "frame-ancestors https://mywebsite.example.com 'self'"
        }
    }

When embedding the notebook in a website using an iframe, consider putting
the notebook in single-tab mode. Since the notebook opens some links in new
tabs by default, single-tab mode keeps the notebook from opening additional
tabs. Adding the following to :file:`~/.jupyter/custom/custom.js` will enable
single-tab mode:

.. code-block:: javascript

    define(['base/js/namespace'], function(Jupyter){
        Jupyter._target = '_self';
    });

Using a gateway server for kernel management
--------------------------------------------

You are now able to redirect the management of your kernels to a Gateway
Server (i.e., `Jupyter Kernel Gateway `_ or `Jupyter Enterprise Gateway `_)
simply by specifying a Gateway url via the following command-line option:

.. code-block:: bash

    $ jupyter server --gateway-url=http://my-gateway-server:8888

via the environment:

.. code-block:: bash

    JUPYTER_GATEWAY_URL=http://my-gateway-server:8888

or in :file:`jupyter_server_config.py`:

.. code-block:: python

    c.GatewayClient.url = "http://my-gateway-server:8888"

When provided, all kernel specifications will be retrieved from the specified
Gateway server and all kernels will be managed by that server. This option
makes it possible to target kernel processes at managed clusters while the
notebook's management remains local to the server.

Known issues
------------

Proxies
~~~~~~~

When behind a proxy, especially if your system or browser is set to
autodetect the proxy, the notebook web application might fail to connect to
the server's websockets, and present you with a warning at startup. In this
case, you need to configure your system not to use the proxy for the server's
address.

For example, in Firefox, go to the Preferences panel, Advanced section,
Network tab, click 'Settings...', and add the address of the Jupyter server
to the 'No proxy for' field.

Content-Security-Policy (CSP)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Certain `security guidelines `_ recommend that servers use a
Content-Security-Policy (CSP) header to prevent cross-site scripting
vulnerabilities, specifically limiting to ``default-src: https:`` when
possible.
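In its simplest form, such a policy is delivered as a single response header.
A minimal example (the exact value is site-specific) is::

    Content-Security-Policy: default-src https: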
This directive causes two problems with Jupyter. First, it disables execution
of inline javascript code, which is used extensively by Jupyter. Second, it
limits communication to the https scheme, and prevents WebSockets from
working because they communicate via the wss scheme (or ws for insecure
communication). Jupyter uses WebSockets for interacting with kernels, so when
you visit a server with such a CSP, your browser will block attempts to use
wss, which will cause you to see "Connection failed" messages from jupyter
notebooks, or simply no response from jupyter terminals. By looking in your
browser's javascript console, you can see any error messages that will
explain what is failing.

To avoid these problems, you need to add ``'unsafe-inline'`` and
``connect-src https: wss:`` to your CSP header, at least for pages served by
jupyter. (That is, you can leave your CSP unchanged for other parts of your
website.) Note that multiple CSP headers are allowed, but successive CSP
headers can only restrict the policy; they cannot loosen it. For example, if
your server sends both of these headers::

    Content-Security-Policy "default-src https: 'unsafe-inline'"
    Content-Security-Policy "connect-src https: wss:"

the first policy will already eliminate wss connections, so the second has no
effect. Therefore, you can't simply add the second header; you have to
actually modify your CSP header to look more like this::

    Content-Security-Policy "default-src https: 'unsafe-inline'; connect-src https: wss:"

Docker CMD
~~~~~~~~~~

Using ``jupyter server`` as a `Docker CMD `_ results in kernels repeatedly
crashing, likely due to a lack of `PID reaping `_. To avoid this, use the
`tini `_ ``init`` as your Dockerfile ``ENTRYPOINT``::

    # Add Tini. Tini operates as a process subreaper for jupyter. This prevents
    # kernel crashes.
    ENV TINI_VERSION v0.6.0
    ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /usr/bin/tini
    RUN chmod +x /usr/bin/tini
    ENTRYPOINT ["/usr/bin/tini", "--"]

    EXPOSE 8888
    CMD ["jupyter", "server", "--port=8888", "--no-browser", "--ip=0.0.0.0"]

jupyter_server-1.13.1/docs/source/operators/security.rst000066400000000000000000000204231415445537200235470ustar00rootroot00000000000000
.. _server_security:

Security in the Jupyter Server
==============================

Since access to the Jupyter Server means access to running arbitrary code, it
is important to restrict access to the server. For this reason, Jupyter
Server uses token-based authentication that is **on by default**.

.. note::

    If you enable a password for your server, token authentication is not
    enabled by default.

When token authentication is enabled, the server uses a token to authenticate
requests. This token can be provided to log in to the server in three ways:

- in the ``Authorization`` header, e.g.::

      Authorization: token abcdef...

- In a URL parameter, e.g.::

      https://my-server/tree/?token=abcdef...

- In the password field of the login form that will be shown to you if you
  are not logged in.

When you start a Jupyter server with token authentication enabled (default),
a token is generated to use for authentication. This token is logged to the
terminal, so that you can copy/paste the URL into your browser::

    [I 11:59:16.597 ServerApp] The Jupyter Server is running at:
    http://localhost:8888/?token=c8de56fa4deed24899803e93c227592aef6538f93025fe01

If the Jupyter server is going to open your browser automatically, an
*additional* token is generated for launching the browser.
This additional token can be used only once, and is used to set a cookie for
your browser once it connects. After your browser has made its first request
with this one-time token, the token is discarded and a cookie is set in your
browser.

At any later time, you can see the tokens and URLs for all of your running
servers with :command:`jupyter server list`::

    $ jupyter server list
    Currently running servers:
    http://localhost:8888/?token=abc... :: /home/you/notebooks
    https://0.0.0.0:9999/?token=123... :: /tmp/public
    http://localhost:8889/ :: /tmp/has-password

For servers with token-authentication enabled, the URL in the above listing
will include the token, so you can copy and paste that URL into your browser
to login. If a server has no token (e.g. it has a password or has
authentication disabled), the URL will not include the token argument. Once
you have visited this URL, a cookie will be set in your browser and you won't
need to use the token again, unless you switch browsers, clear your cookies,
or start a Jupyter server on a new port.

Alternatives to token authentication
------------------------------------

If a generated token doesn't work well for you, you can set a password for
your server. :command:`jupyter server password` will prompt you for a
password, and store the hashed password in your
:file:`jupyter_server_config.json`.

.. versionadded:: 5.0

    The :command:`jupyter server password` command is added.

It is possible to disable authentication altogether by setting the token and
password to empty strings, but this is **NOT RECOMMENDED**, unless
authentication or access restrictions are handled at a different layer in
your web application:

.. sourcecode:: python

    c.ServerApp.token = ''
    c.ServerApp.password = ''

Security in notebook documents
==============================

As Jupyter Server becomes more popular for sharing and collaboration, the
potential for malicious people to attempt to exploit the notebook for their
nefarious purposes increases. IPython 2.0 introduced a security model to
prevent execution of untrusted code without explicit user input.

The problem
-----------

The whole point of Jupyter is arbitrary code execution. We have no desire to
limit what can be done with a notebook, which would negatively impact its
utility.

Unlike other programs, a Jupyter notebook document includes output. Unlike
other documents, that output exists in a context that can execute code (via
Javascript).

The security problem we need to solve is that no code should execute just
because a user has **opened** a notebook that **they did not write**. Like
any other program, once a user decides to execute code in a notebook, it is
considered trusted, and should be allowed to do anything.

Our security model
------------------

- Untrusted HTML is always sanitized
- Untrusted Javascript is never executed
- HTML and Javascript in Markdown cells are never trusted
- **Outputs** generated by the user are trusted
- Any other HTML or Javascript (in Markdown cells, output generated by
  others) is never trusted
- The central question of trust is "Did the current user do this?"

The details of trust
--------------------

When a notebook is executed and saved, a signature is computed from a digest
of the notebook's contents plus a secret key. This is stored in a database,
writable only by the current user.
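Conceptually, this signature is a keyed digest (HMAC) of the serialized
notebook. The following is a minimal sketch of the idea only, not the actual
implementation (the real logic lives in ``nbformat.sign`` and honors the
configurable ``NotebookNotary.algorithm`` trait listed in
:ref:`other-full-config`):

.. code-block:: python

    import hashlib
    import hmac

    def compute_signature(notebook_bytes: bytes, secret: bytes) -> str:
        """Keyed digest of a serialized notebook (default scheme: hmac-sha256)."""
        return hmac.new(secret, notebook_bytes, hashlib.sha256).hexdigest()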
By default, this database is located at::

    ~/.local/share/jupyter/nbsignatures.db  # Linux
    ~/Library/Jupyter/nbsignatures.db       # OS X
    %APPDATA%/jupyter/nbsignatures.db       # Windows

Each signature represents a series of outputs which were produced by code the
current user executed, and are therefore trusted.

When you open a notebook, the server computes its signature, and checks if
it's in the database. If a match is found, HTML and Javascript output in the
notebook will be trusted at load, otherwise it will be untrusted.

Any output generated during an interactive session is trusted.

Updating trust
**************

A notebook's trust is updated when the notebook is saved. If there are any
untrusted outputs still in the notebook, the notebook will not be trusted,
and no signature will be stored. If all untrusted outputs have been removed
(either via ``Clear Output`` or re-execution), then the notebook will become
trusted.

While trust is updated per output, this is only for the duration of a single
session. A newly loaded notebook file is either trusted or not in its
entirety.

Explicit trust
**************

Sometimes re-executing a notebook to generate trusted output is not an
option, either because dependencies are unavailable, or it would take a long
time. Users can explicitly trust a notebook in two ways:

- At the command-line, with::

    jupyter trust /path/to/notebook.ipynb

- After loading the untrusted notebook, with ``File / Trust Notebook``

These two methods simply load the notebook, compute a new signature, and add
that signature to the user's database.

Reporting security issues
-------------------------

If you find a security vulnerability in Jupyter, either a failure of the code
to properly implement the model described here, or a failure of the model
itself, please report it to security@ipython.org.

If you prefer to encrypt your security reports, you can use :download:`this
PGP public key `.

Affected use cases
------------------

Some use cases that work in Jupyter 1.0 became less convenient in 2.0 as a
result of the security changes. We do our best to minimize these annoyances,
but security is always at odds with convenience.

Javascript and CSS in Markdown cells
************************************

While never officially supported, it had become common practice to put hidden
Javascript or CSS styling in Markdown cells, so that they would not be
visible on the page. Since Markdown cells are now sanitized (by `Google Caja
`__), all Javascript (including click event handlers, etc.) and CSS will be
stripped.

We plan to provide a mechanism for notebook themes, but in the meantime
styling the notebook can only be done via either ``custom.css`` or CSS in
HTML output. The latter only has an effect if the notebook is trusted,
because otherwise the output will be sanitized just like Markdown.

Collaboration
*************

When collaborating on a notebook, people probably want to see the outputs
produced by their colleagues' most recent executions. Since each
collaborator's key will differ, this will result in each share starting in an
untrusted state. There are three basic approaches to this:

- re-run notebooks when you get them (not always viable)
- explicitly trust notebooks via ``jupyter trust`` or the notebook menu
  (annoying, but easy)
- share a notebook signatures database, and use configuration dedicated to
  the collaboration while working on the project.

To share a signatures database among users, you can configure:
.. code-block:: python

    c.NotebookNotary.data_dir = "/path/to/signature_dir"

to specify a non-default path to the SQLite database (of notebook hashes,
essentially).

jupyter_server-1.13.1/docs/source/other/000077500000000000000000000000001415445537200202505ustar00rootroot00000000000000
jupyter_server-1.13.1/docs/source/other/faq.rst000066400000000000000000000006621415445537200215550ustar00rootroot00000000000000
.. _faq:

Frequently asked questions
==========================

Here is a list of questions we think you might have. This list will always be
growing, so please feel free to add your question+answer to this page!
|:rocket:|

Can I configure multiple extensions at once?
--------------------------------------------

Check out our "Operator" docs on how to :ref:`configure extensions `.
|:closed_book:|

jupyter_server-1.13.1/docs/source/other/full-config.rst000066400000000000000000001163671415445537200232210ustar00rootroot00000000000000
.. _other-full-config:

Config file and command line options
====================================

The Jupyter Server can be run with a variety of command line arguments. A
list of available options can be found below in the :ref:`options section `.

Defaults for these options can also be set by creating a file named
``jupyter_server_config.py`` in your Jupyter folder. The Jupyter folder is in
your home directory, ``~/.jupyter``.

To create a ``jupyter_server_config.py`` file, with all the defaults
commented out, you can use the following command line::

  $ jupyter server --generate-config

.. _options:

Options
-------

This list of options can be generated by running the following and hitting
enter::

  $ jupyter server --help-all

Application.log_datefmt : Unicode
    Default: ``'%Y-%m-%d %H:%M:%S'``

    The date format used by logging formatters for %(asctime)s

Application.log_format : Unicode
    Default: ``'[%(name)s]%(highlevel)s %(message)s'``

    The Logging format template

Application.log_level : any of ``0``|``10``|``20``|``30``|``40``|``50``|``'DEBUG'``|``'INFO'``|``'WARN'``|``'ERROR'``|``'CRITICAL'``
    Default: ``30``

    Set the log level by value or name.

Application.show_config : Bool
    Default: ``False``

    Instead of starting the Application, dump configuration to stdout

Application.show_config_json : Bool
    Default: ``False``

    Instead of starting the Application, dump configuration to stdout (as JSON)

JupyterApp.answer_yes : Bool
    Default: ``False``

    Answer yes to any prompts.

JupyterApp.config_file : Unicode
    Default: ``''``

    Full path of a config file.

JupyterApp.config_file_name : Unicode
    Default: ``''``

    Specify a config file to load.

JupyterApp.generate_config : Bool
    Default: ``False``

    Generate default config file.

JupyterApp.log_datefmt : Unicode
    Default: ``'%Y-%m-%d %H:%M:%S'``

    The date format used by logging formatters for %(asctime)s

JupyterApp.log_format : Unicode
    Default: ``'[%(name)s]%(highlevel)s %(message)s'``

    The Logging format template

JupyterApp.log_level : any of ``0``|``10``|``20``|``30``|``40``|``50``|``'DEBUG'``|``'INFO'``|``'WARN'``|``'ERROR'``|``'CRITICAL'``
    Default: ``30``

    Set the log level by value or name.
JupyterApp.show_config : Bool Default: ``False`` Instead of starting the Application, dump configuration to stdout JupyterApp.show_config_json : Bool Default: ``False`` Instead of starting the Application, dump configuration to stdout (as JSON) ServerApp.allow_credentials : Bool Default: ``False`` Set the Access-Control-Allow-Credentials: true header ServerApp.allow_origin : Unicode Default: ``''`` Set the Access-Control-Allow-Origin header Use '*' to allow any origin to access your server. Takes precedence over allow_origin_pat. ServerApp.allow_origin_pat : Unicode Default: ``''`` Use a regular expression for the Access-Control-Allow-Origin header Requests from an origin matching the expression will get replies with: Access-Control-Allow-Origin: origin where `origin` is the origin of the request. Ignored if allow_origin is set. ServerApp.allow_password_change : Bool Default: ``True`` Allow password to be changed at login for the Jupyter server. While logging in with a token, the Jupyter server UI will give the opportunity to the user to enter a new password at the same time that will replace the token login mechanism. This can be set to false to prevent changing password from the UI/API. ServerApp.allow_remote_access : Bool Default: ``False`` Allow requests where the Host header doesn't point to a local server By default, requests get a 403 forbidden response if the 'Host' header shows that the browser thinks it's on a non-local domain. Setting this option to True disables this check. This protects against 'DNS rebinding' attacks, where a remote web server serves you a page and then changes its DNS to send later requests to a local IP, bypassing same-origin checks. Local IP addresses (such as 127.0.0.1 and ::1) are allowed as local, along with hostnames configured in local_hostnames. ServerApp.allow_root : Bool Default: ``False`` Whether to allow the user to run the server as root. ServerApp.answer_yes : Bool Default: ``False`` Answer yes to any prompts. ServerApp.base_url : Unicode Default: ``'/'`` The base URL for the Jupyter server. Leading and trailing slashes can be omitted, and will automatically be added. ServerApp.browser : Unicode Default: ``''`` Specify what command to use to invoke a web browser when starting the server. If not specified, the default browser will be determined by the `webbrowser` standard library module, which allows setting of the BROWSER environment variable to override it. ServerApp.certfile : Unicode Default: ``''`` The full path to an SSL/TLS certificate file. ServerApp.client_ca : Unicode Default: ``''`` The full path to a certificate authority certificate for SSL/TLS client authentication. ServerApp.config_file : Unicode Default: ``''`` Full path of a config file. ServerApp.config_file_name : Unicode Default: ``''`` Specify a config file to load. ServerApp.config_manager_class : Type Default: ``'jupyter_server.services.config.manager.ConfigManager'`` The config manager class to use ServerApp.contents_manager_class : Type Default: ``'jupyter_server.services.contents.largefilemanager.LargeFileM...`` The content manager class to use. ServerApp.cookie_options : Dict Default: ``{}`` Extra keyword arguments to pass to `set_secure_cookie`. See tornado's set_secure_cookie docs for details. ServerApp.cookie_secret : Bytes Default: ``b''`` The random bytes used to secure cookies. By default this is a new random number every time you start the server. Set it to a value in a config file to enable logins to persist across server sessions. 
    Note: Cookie secrets should be kept private, do not share config files
    with cookie_secret stored in plaintext (you can read the value from a
    file).

ServerApp.cookie_secret_file : Unicode
    Default: ``''``

    The file where the cookie secret is stored.

ServerApp.custom_display_url : Unicode
    Default: ``''``

    Override URL shown to users.

    Replace actual URL, including protocol, address, port and base URL, with
    the given value when displaying URL to the users. Do not change the actual
    connection URL. If authentication token is enabled, the token is added to
    the custom URL automatically.

    This option is intended to be used when the URL to display to the user
    cannot be determined reliably by the Jupyter server (proxified or
    containerized setups for example).

ServerApp.default_url : Unicode
    Default: ``'/'``

    The default URL to redirect to from `/`

ServerApp.disable_check_xsrf : Bool
    Default: ``False``

    Disable cross-site-request-forgery protection

    Jupyter notebook 4.3.1 introduces protection from cross-site request
    forgeries, requiring API requests to either:

    - originate from pages served by this server (validated with XSRF cookie
      and token), or
    - authenticate with a token

    Some anonymous compute resources still desire the ability to run code,
    completely without authentication. These services can disable all
    authentication and security checks, with the full knowledge of what that
    implies.

ServerApp.extra_services : List
    Default: ``[]``

    handlers that should be loaded at higher priority than the default services

ServerApp.extra_static_paths : List
    Default: ``[]``

    Extra paths to search for serving static files.

    This allows adding javascript/css to be available from the Jupyter server
    machine, or overriding individual files in the IPython

ServerApp.extra_template_paths : List
    Default: ``[]``

    Extra paths to search for serving jinja templates.

    Can be used to override templates from jupyter_server.templates.

ServerApp.file_to_run : Unicode
    Default: ``''``

    No description

ServerApp.generate_config : Bool
    Default: ``False``

    Generate default config file.

ServerApp.get_secure_cookie_kwargs : Dict
    Default: ``{}``

    Extra keyword arguments to pass to `get_secure_cookie`. See tornado's
    get_secure_cookie docs for details.

ServerApp.iopub_data_rate_limit : Float
    Default: ``1000000``

    (bytes/sec) Maximum rate at which stream output can be sent on iopub
    before they are limited.

ServerApp.iopub_msg_rate_limit : Float
    Default: ``1000``

    (msgs/sec) Maximum rate at which messages can be sent on iopub before
    they are limited.

ServerApp.ip : Unicode
    Default: ``'localhost'``

    The IP address the Jupyter server will listen on.

ServerApp.jinja_environment_options : Dict
    Default: ``{}``

    Supply extra arguments that will be passed to Jinja environment.

ServerApp.jinja_template_vars : Dict
    Default: ``{}``

    Extra variables to supply to jinja templates when rendering.

ServerApp.jpserver_extensions : Dict
    Default: ``{}``

    Dict of Python modules to load as notebook server extensions. Entry
    values can be used to enable and disable the loading of the extensions.
    The extensions will be loaded in alphabetical order.

ServerApp.kernel_manager_class : Type
    Default: ``'jupyter_server.services.kernels.kernelmanager.MappingKernelM...``

    The kernel manager class to use.

ServerApp.kernel_spec_manager_class : Type
    Default: ``'jupyter_client.kernelspec.KernelSpecManager'``

    The kernel spec manager class to use. Should be a subclass of
    `jupyter_client.kernelspec.KernelSpecManager`.
The Api of KernelSpecManager is provisional and might change without warning between this version of Jupyter and the next stable one. ServerApp.keyfile : Unicode Default: ``''`` The full path to a private key file for usage with SSL/TLS. ServerApp.local_hostnames : List Default: ``['localhost']`` Hostnames to allow as local when allow_remote_access is False. Local IP addresses (such as 127.0.0.1 and ::1) are automatically accepted as local as well. ServerApp.log_datefmt : Unicode Default: ``'%Y-%m-%d %H:%M:%S'`` The date format used by logging formatters for %(asctime)s ServerApp.log_format : Unicode Default: ``'[%(name)s]%(highlevel)s %(message)s'`` The Logging format template ServerApp.log_level : any of ``0``|``10``|``20``|``30``|``40``|``50``|``'DEBUG'``|``'INFO'``|``'WARN'``|``'ERROR'``|``'CRITICAL'`` Default: ``30`` Set the log level by value or name. ServerApp.login_handler_class : Type Default: ``'jupyter_server.auth.login.LoginHandler'`` The login handler class to use. ServerApp.logout_handler_class : Type Default: ``'jupyter_server.auth.logout.LogoutHandler'`` The logout handler class to use. ServerApp.max_body_size : Int Default: ``536870912`` Sets the maximum allowed size of the client request body, specified in the Content-Length request header field. If the size in a request exceeds the configured value, a malformed HTTP message is returned to the client. Note: max_body_size is applied even in streaming mode. ServerApp.max_buffer_size : Int Default: ``536870912`` Gets or sets the maximum amount of memory, in bytes, that is allocated for use by the buffer manager. ServerApp.notebook_dir : Unicode Default: ``''`` DEPRECATED, use root_dir. ServerApp.open_browser : Bool Default: ``False`` Whether to open in a browser after starting. The specific browser used is platform dependent and determined by the python standard library `webbrowser` module, unless it is overridden using the --browser (ServerApp.browser) configuration option. ServerApp.password : Unicode Default: ``''`` Hashed password to use for web authentication. To generate, type in a python/IPython shell: from jupyter_server.auth import passwd; passwd() The string should be of the form type:salt:hashed-password. ServerApp.password_required : Bool Default: ``False`` Forces users to use a password for the Jupyter server. This is useful in a multi user environment, for instance when everybody in the LAN can access each other's machine through ssh. In such a case, serving on localhost is not secure since any user can connect to the Jupyter server via ssh. ServerApp.port : Int Default: ``8888`` The port the Jupyter server will listen on. ServerApp.port_retries : Int Default: ``50`` The number of additional ports to try if the specified port is not available. ServerApp.pylab : Unicode Default: ``'disabled'`` DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib. ServerApp.quit_button : Bool Default: ``True`` If True, display controls to shut down the Jupyter server, such as menu items or buttons. ServerApp.rate_limit_window : Float Default: ``3`` (sec) Time window used to check the message and data rate limits. ServerApp.reraise_server_extension_failures : Bool Default: ``False`` Reraise exceptions encountered loading server extensions? ServerApp.root_dir : Unicode Default: ``''`` The directory to use for notebooks and kernels. ServerApp.session_manager_class : Type Default: ``'jupyter_server.services.sessions.sessionmanager.SessionManager'`` The session manager class to use. 
ServerApp.show_config : Bool
    Default: ``False``

    Instead of starting the Application, dump configuration to stdout

ServerApp.show_config_json : Bool
    Default: ``False``

    Instead of starting the Application, dump configuration to stdout (as JSON)

ServerApp.shutdown_no_activity_timeout : Int
    Default: ``0``

    Shut down the server after N seconds with no kernels or terminals running
    and no activity. This can be used together with culling idle kernels
    (MappingKernelManager.cull_idle_timeout) to shutdown the Jupyter server
    when it's not in use. This is not precisely timed: it may shut down up to
    a minute later. 0 (the default) disables this automatic shutdown.

ServerApp.ssl_options : Dict
    Default: ``{}``

    Supply SSL options for the tornado HTTPServer. See the tornado docs for
    details.

ServerApp.terminado_settings : Dict
    Default: ``{}``

    Supply overrides for terminado. Currently only supports "shell_command".

ServerApp.terminals_enabled : Bool
    Default: ``True``

    Set to False to disable terminals.

    This does *not* make the server more secure by itself. Anything the user
    can do in a terminal, they can also do in a notebook.

    Terminals may also be automatically disabled if the terminado package is
    not available.

ServerApp.token : Unicode
    Default: ``''``

    Token used for authenticating first-time connections to the server.

    When no password is enabled, the default is to generate a new, random
    token.

    Setting to an empty string disables authentication altogether, which is
    NOT RECOMMENDED.

ServerApp.tornado_settings : Dict
    Default: ``{}``

    Supply overrides for the tornado.web.Application that the Jupyter server
    uses.

ServerApp.trust_xheaders : Bool
    Default: ``False``

    Whether to trust or not X-Scheme/X-Forwarded-Proto and
    X-Real-Ip/X-Forwarded-For headers sent by the upstream reverse proxy.
    Necessary if the proxy handles SSL.

ServerApp.webbrowser_open_new : Int
    Default: ``2``

    Specify where to open the server on startup. This is the `new` argument
    passed to the standard library method `webbrowser.open`. The behaviour is
    not guaranteed, but depends on browser support. Valid values are:

    - 2 opens a new tab,
    - 1 opens a new window,
    - 0 opens in an existing window.

    See the `webbrowser.open` documentation for details.

ServerApp.websocket_compression_options : Any
    Default: ``None``

    Set the tornado compression options for websocket connections.

    This value will be returned from
    :meth:`WebSocketHandler.get_compression_options`. None (default) will
    disable compression. A dict (even an empty one) will enable compression.

    See the tornado docs for WebSocketHandler.get_compression_options for
    details.

ServerApp.websocket_url : Unicode
    Default: ``''``

    The base URL for websockets, if it differs from the HTTP server (hint: it
    almost certainly doesn't).

    Should be in the form of an HTTP origin: ws[s]://hostname[:port]

ConnectionFileMixin.connection_file : Unicode
    Default: ``''``

    JSON file in which to store connection info [default: kernel-.json]

    This file will contain the IP, ports, and authentication key needed to
    connect clients to this kernel. By default, this file will be created in
    the security dir of the current profile, but can be specified by absolute
    path.

ConnectionFileMixin.control_port : Int
    Default: ``0``

    set the control (ROUTER) port [default: random]

ConnectionFileMixin.hb_port : Int
    Default: ``0``

    set the heartbeat port [default: random]

ConnectionFileMixin.iopub_port : Int
    Default: ``0``

    set the iopub (PUB) port [default: random]

ConnectionFileMixin.ip : Unicode
    Default: ``''``

    Set the kernel's IP address [default localhost].
If the IP address is something other than localhost, then Consoles on other machines will be able to connect to the Kernel, so be careful! ConnectionFileMixin.shell_port : Int Default: ``0`` set the shell (ROUTER) port [default: random] ConnectionFileMixin.stdin_port : Int Default: ``0`` set the stdin (ROUTER) port [default: random] ConnectionFileMixin.transport : any of ``'tcp'``|``'ipc'`` (case-insensitive) Default: ``'tcp'`` No description KernelManager.autorestart : Bool Default: ``True`` Should we autorestart the kernel if it dies. KernelManager.connection_file : Unicode Default: ``''`` JSON file in which to store connection info [default: kernel-.json] This file will contain the IP, ports, and authentication key needed to connect clients to this kernel. By default, this file will be created in the security dir of the current profile, but can be specified by absolute path. KernelManager.control_port : Int Default: ``0`` set the control (ROUTER) port [default: random] KernelManager.hb_port : Int Default: ``0`` set the heartbeat port [default: random] KernelManager.iopub_port : Int Default: ``0`` set the iopub (PUB) port [default: random] KernelManager.ip : Unicode Default: ``''`` Set the kernel's IP address [default localhost]. If the IP address is something other than localhost, then Consoles on other machines will be able to connect to the Kernel, so be careful! KernelManager.kernel_cmd : List Default: ``[]`` DEPRECATED: Use kernel_name instead. The Popen Command to launch the kernel. Override this if you have a custom kernel. If kernel_cmd is specified in a configuration file, Jupyter does not pass any arguments to the kernel, because it cannot make any assumptions about the arguments that the kernel understands. In particular, this means that the kernel does not receive the option --debug if it given on the Jupyter command line. KernelManager.shell_port : Int Default: ``0`` set the shell (ROUTER) port [default: random] KernelManager.shutdown_wait_time : Float Default: ``5.0`` Time to wait for a kernel to terminate before killing it, in seconds. KernelManager.stdin_port : Int Default: ``0`` set the stdin (ROUTER) port [default: random] KernelManager.transport : any of ``'tcp'``|``'ipc'`` (case-insensitive) Default: ``'tcp'`` No description Session.buffer_threshold : Int Default: ``1024`` Threshold (in bytes) beyond which an object's buffer should be extracted to avoid pickling. Session.check_pid : Bool Default: ``True`` Whether to check PID to protect against calls after fork. This check can be disabled if fork-safety is handled elsewhere. Session.copy_threshold : Int Default: ``65536`` Threshold (in bytes) beyond which a buffer should be sent without copying. Session.debug : Bool Default: ``False`` Debug output in the Session Session.digest_history_size : Int Default: ``65536`` The maximum number of digests to remember. The digest history will be culled when it exceeds this value. Session.item_threshold : Int Default: ``64`` The maximum number of items for a container to be introspected for custom serialization. Containers larger than this are pickled outright. Session.key : CBytes Default: ``b''`` execution key, for signing messages. Session.keyfile : Unicode Default: ``''`` path to file containing execution key. Session.metadata : Dict Default: ``{}`` Metadata dictionary, which serves as the default top-level metadata dict for each message. Session.packer : DottedObjectName Default: ``'json'`` The name of the packer for serializing messages. 
Should be one of 'json', 'pickle', or an import name for a custom callable serializer. Session.session : CUnicode Default: ``''`` The UUID identifying this session. Session.signature_scheme : Unicode Default: ``'hmac-sha256'`` The digest scheme used to construct the message signatures. Must have the form 'hmac-HASH'. Session.unpacker : DottedObjectName Default: ``'json'`` The name of the unpacker for unserializing messages. Only used with custom functions for `packer`. Session.username : Unicode Default: ``'username'`` Username for the Session. Default is your system username. MultiKernelManager.default_kernel_name : Unicode Default: ``'python3'`` The name of the default kernel to start MultiKernelManager.kernel_manager_class : DottedObjectName Default: ``'jupyter_client.ioloop.IOLoopKernelManager'`` The kernel manager class. This is configurable to allow subclassing of the KernelManager for customized behavior. MultiKernelManager.shared_context : Bool Default: ``True`` Share a single zmq.Context to talk to all my kernels MappingKernelManager.allow_tracebacks : Bool Default: ``True`` Whether to send tracebacks to clients on exceptions. MappingKernelManager.allowed_message_types : List Default: ``[]`` White list of allowed kernel message types. When the list is empty, all message types are allowed. MappingKernelManager.buffer_offline_messages : Bool Default: ``True`` Whether messages from kernels whose frontends have disconnected should be buffered in-memory. When True (default), messages are buffered and replayed on reconnect, avoiding lost messages due to interrupted connectivity. Disable if long-running kernels will produce too much output while no frontends are connected. MappingKernelManager.cull_busy : Bool Default: ``False`` Whether to consider culling kernels which are busy. Only effective if cull_idle_timeout > 0. MappingKernelManager.cull_connected : Bool Default: ``False`` Whether to consider culling kernels which have one or more connections. Only effective if cull_idle_timeout > 0. MappingKernelManager.cull_idle_timeout : Int Default: ``0`` Timeout (in seconds) after which a kernel is considered idle and ready to be culled. Values of 0 or lower disable culling. Very short timeouts may result in kernels being culled for users with poor network connections. MappingKernelManager.cull_interval : Int Default: ``300`` The interval (in seconds) on which to check for idle kernels exceeding the cull timeout value. MappingKernelManager.default_kernel_name : Unicode Default: ``'python3'`` The name of the default kernel to start MappingKernelManager.kernel_info_timeout : Float Default: ``60`` Timeout for giving up on a kernel (in seconds). On starting and restarting kernels, we check whether the kernel is running and responsive by sending kernel_info_requests. This sets the timeout in seconds for how long the kernel can take before being presumed dead. This affects the MappingKernelManager (which handles kernel restarts) and the ZMQChannelsHandler (which handles the startup). MappingKernelManager.kernel_manager_class : DottedObjectName Default: ``'jupyter_client.ioloop.IOLoopKernelManager'`` The kernel manager class. This is configurable to allow subclassing of the KernelManager for customized behavior. 
MappingKernelManager.root_dir : Unicode
    Default: ``''``

    No description

MappingKernelManager.shared_context : Bool
    Default: ``True``

    Share a single zmq.Context to talk to all my kernels

MappingKernelManager.traceback_replacement_message : Unicode
    Default: ``'An exception occurred at runtime, which is not shown due to ...``

    Message to print when allow_tracebacks is False, and an exception occurs

KernelSpecManager.ensure_native_kernel : Bool
    Default: ``True``

    If there is no Python kernelspec registered and the IPython kernel is
    available, ensure it is added to the spec list.

KernelSpecManager.kernel_spec_class : Type
    Default: ``'jupyter_client.kernelspec.KernelSpec'``

    The kernel spec class. This is configurable to allow subclassing of the
    KernelSpecManager for customized behavior.

KernelSpecManager.whitelist : Set
    Default: ``set()``

    Whitelist of allowed kernel names.

    By default, all installed kernels are allowed.

ContentsManager.allow_hidden : Bool
    Default: ``False``

    Allow access to hidden files

ContentsManager.checkpoints : Instance
    Default: ``None``

    No description

ContentsManager.checkpoints_class : Type
    Default: ``'jupyter_server.services.contents.checkpoints.Checkpoints'``

    No description

ContentsManager.checkpoints_kwargs : Dict
    Default: ``{}``

    No description

ContentsManager.files_handler_class : Type
    Default: ``'jupyter_server.files.handlers.FilesHandler'``

    handler class to use when serving raw file requests.

    Default is a fallback that talks to the ContentsManager API, which may be
    inefficient, especially for large files.

    Local files-based ContentsManagers can use a StaticFileHandler subclass,
    which will be much more efficient.

    Access to these files should be Authenticated.

ContentsManager.files_handler_params : Dict
    Default: ``{}``

    Extra parameters to pass to files_handler_class.

    For example, StaticFileHandlers generally expect a `path` argument
    specifying the root directory from which to serve files.

ContentsManager.hide_globs : List
    Default: ``['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dyl...``

    Glob patterns to hide in file and directory listings.

ContentsManager.pre_save_hook : Any
    Default: ``None``

    Python callable or importstring thereof

    To be called on a contents model prior to save.

    This can be used to process the structure, such as removing notebook
    outputs or other side effects that should not be saved.

    It will be called as (all arguments passed by keyword)::

        hook(path=path, model=model, contents_manager=self)

    - model: the model to be saved. Includes file contents. Modifying this
      dict will affect the file that is stored.
    - path: the API path of the save destination
    - contents_manager: this ContentsManager instance

ContentsManager.root_dir : Unicode
    Default: ``'/'``

    No description

ContentsManager.untitled_directory : Unicode
    Default: ``'Untitled Folder'``

    The base name used when creating untitled directories.

ContentsManager.untitled_file : Unicode
    Default: ``'untitled'``

    The base name used when creating untitled files.

ContentsManager.untitled_notebook : Unicode
    Default: ``'Untitled'``

    The base name used when creating untitled notebooks.

FileManagerMixin.use_atomic_writing : Bool
    Default: ``True``

    By default notebooks are saved on disk on a temporary file and then, if
    successfully written, it replaces the old one. This procedure, namely
    'atomic_writing', causes some bugs on file systems without operation
    order enforcement (like some networked fs).
    If set to False, the new notebook is written directly over the old one,
    which could fail (e.g. full filesystem or quota).

FileContentsManager.allow_hidden : Bool
    Default: ``False``

    Allow access to hidden files

FileContentsManager.checkpoints : Instance
    Default: ``None``

    No description

FileContentsManager.checkpoints_class : Type
    Default: ``'jupyter_server.services.contents.checkpoints.Checkpoints'``

    No description

FileContentsManager.checkpoints_kwargs : Dict
    Default: ``{}``

    No description

FileContentsManager.delete_to_trash : Bool
    Default: ``True``

    If True (default), deleting files will send them to the platform's
    trash/recycle bin, where they can be recovered. If False, deleting files
    really deletes them.

FileContentsManager.files_handler_class : Type
    Default: ``'jupyter_server.files.handlers.FilesHandler'``

    handler class to use when serving raw file requests.

    Default is a fallback that talks to the ContentsManager API, which may be
    inefficient, especially for large files.

    Local files-based ContentsManagers can use a StaticFileHandler subclass,
    which will be much more efficient.

    Access to these files should be Authenticated.

FileContentsManager.files_handler_params : Dict
    Default: ``{}``

    Extra parameters to pass to files_handler_class.

    For example, StaticFileHandlers generally expect a `path` argument
    specifying the root directory from which to serve files.

FileContentsManager.hide_globs : List
    Default: ``['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dyl...``

    Glob patterns to hide in file and directory listings.

FileContentsManager.post_save_hook : Any
    Default: ``None``

    Python callable or importstring thereof

    to be called on the path of a file just saved.

    This can be used to process the file on disk, such as converting the
    notebook to a script or HTML via nbconvert.

    It will be called as (all arguments passed by keyword)::

        hook(os_path=os_path, model=model, contents_manager=instance)

    - path: the filesystem path to the file just written
    - model: the model representing the file
    - contents_manager: this ContentsManager instance

FileContentsManager.pre_save_hook : Any
    Default: ``None``

    Python callable or importstring thereof

    To be called on a contents model prior to save.

    This can be used to process the structure, such as removing notebook
    outputs or other side effects that should not be saved.

    It will be called as (all arguments passed by keyword)::

        hook(path=path, model=model, contents_manager=self)

    - model: the model to be saved. Includes file contents. Modifying this
      dict will affect the file that is stored.
    - path: the API path of the save destination
    - contents_manager: this ContentsManager instance

FileContentsManager.root_dir : Unicode
    Default: ``''``

    No description

FileContentsManager.untitled_directory : Unicode
    Default: ``'Untitled Folder'``

    The base name used when creating untitled directories.

FileContentsManager.untitled_file : Unicode
    Default: ``'untitled'``

    The base name used when creating untitled files.

FileContentsManager.untitled_notebook : Unicode
    Default: ``'Untitled'``

    The base name used when creating untitled notebooks.

FileContentsManager.use_atomic_writing : Bool
    Default: ``True``

    By default notebooks are saved on disk on a temporary file and then, if
    successfully written, it replaces the old one. This procedure, namely
    'atomic_writing', causes some bugs on file systems without operation
    order enforcement (like some networked fs).
    If set to False, the new notebook is written directly over the old one,
    which could fail (e.g. full filesystem or quota).

NotebookNotary.algorithm : any of ``'blake2s'``|``'sha512'``|``'md5'``|``'sha3_512'``|``'sha3_224'``|``'blake2b'``|``'sha384'``|``'sha1'``|``'sha3_256'``|``'sha256'``|``'sha224'``|``'sha3_384'``
    Default: ``'sha256'``

    The hashing algorithm used to sign notebooks.

NotebookNotary.db_file : Unicode
    Default: ``''``

    The sqlite file in which to store notebook signatures. By default, this
    will be in your Jupyter data directory. You can set it to ':memory:' to
    disable sqlite writing to the filesystem.

NotebookNotary.secret : Bytes
    Default: ``b''``

    The secret key with which notebooks are signed.

NotebookNotary.secret_file : Unicode
    Default: ``''``

    The file where the secret key is stored.

NotebookNotary.store_factory : Callable
    Default: ``traitlets.Undefined``

    A callable returning the storage backend for notebook signatures. The
    default uses an SQLite database.

GatewayMappingKernelManager.allow_tracebacks : Bool
    Default: ``True``

    Whether to send tracebacks to clients on exceptions.

GatewayMappingKernelManager.allowed_message_types : List
    Default: ``[]``

    White list of allowed kernel message types. When the list is empty, all
    message types are allowed.

GatewayMappingKernelManager.buffer_offline_messages : Bool
    Default: ``True``

    Whether messages from kernels whose frontends have disconnected should be
    buffered in-memory. When True (default), messages are buffered and
    replayed on reconnect, avoiding lost messages due to interrupted
    connectivity. Disable if long-running kernels will produce too much
    output while no frontends are connected.

GatewayMappingKernelManager.cull_busy : Bool
    Default: ``False``

    Whether to consider culling kernels which are busy. Only effective if
    cull_idle_timeout > 0.

GatewayMappingKernelManager.cull_connected : Bool
    Default: ``False``

    Whether to consider culling kernels which have one or more connections.
    Only effective if cull_idle_timeout > 0.

GatewayMappingKernelManager.cull_idle_timeout : Int
    Default: ``0``

    Timeout (in seconds) after which a kernel is considered idle and ready to
    be culled. Values of 0 or lower disable culling. Very short timeouts may
    result in kernels being culled for users with poor network connections.

GatewayMappingKernelManager.cull_interval : Int
    Default: ``300``

    The interval (in seconds) on which to check for idle kernels exceeding
    the cull timeout value.

GatewayMappingKernelManager.default_kernel_name : Unicode
    Default: ``'python3'``

    The name of the default kernel to start

GatewayMappingKernelManager.kernel_info_timeout : Float
    Default: ``60``

    Timeout for giving up on a kernel (in seconds). On starting and
    restarting kernels, we check whether the kernel is running and responsive
    by sending kernel_info_requests. This sets the timeout in seconds for how
    long the kernel can take before being presumed dead. This affects the
    MappingKernelManager (which handles kernel restarts) and the
    ZMQChannelsHandler (which handles the startup).

GatewayMappingKernelManager.kernel_manager_class : DottedObjectName
    Default: ``'jupyter_client.ioloop.IOLoopKernelManager'``

    The kernel manager class. This is configurable to allow subclassing of
    the KernelManager for customized behavior.
GatewayMappingKernelManager.root_dir : Unicode Default: ``''`` No description GatewayMappingKernelManager.shared_context : Bool Default: ``True`` Share a single zmq.Context to talk to all my kernels GatewayMappingKernelManager.traceback_replacement_message : Unicode Default: ``'An exception occurred at runtime, which is not shown due to ...`` Message to print when allow_tracebacks is False, and an exception occurs GatewayKernelSpecManager.ensure_native_kernel : Bool Default: ``True`` If there is no Python kernelspec registered and the IPython kernel is available, ensure it is added to the spec list. GatewayKernelSpecManager.kernel_spec_class : Type Default: ``'jupyter_client.kernelspec.KernelSpec'`` The kernel spec class. This is configurable to allow subclassing of the KernelSpecManager for customized behavior. GatewayKernelSpecManager.whitelist : Set Default: ``set()`` Whitelist of allowed kernel names. By default, all installed kernels are allowed. GatewayClient.auth_token : Unicode Default: ``None`` The authorization token used in the HTTP headers. (JUPYTER_GATEWAY_AUTH_TOKEN env var) GatewayClient.ca_certs : Unicode Default: ``None`` The filename of CA certificates or None to use defaults. (JUPYTER_GATEWAY_CA_CERTS env var) GatewayClient.client_cert : Unicode Default: ``None`` The filename for client SSL certificate, if any. (JUPYTER_GATEWAY_CLIENT_CERT env var) GatewayClient.client_key : Unicode Default: ``None`` The filename for client SSL key, if any. (JUPYTER_GATEWAY_CLIENT_KEY env var) GatewayClient.connect_timeout : Float Default: ``60.0`` The time allowed for HTTP connection establishment with the Gateway server. (JUPYTER_GATEWAY_CONNECT_TIMEOUT env var) GatewayClient.env_whitelist : Unicode Default: ``''`` A comma-separated list of environment variable names that will be included, along with their values, in the kernel startup request. The corresponding `env_whitelist` configuration value must also be set on the Gateway server - since that configuration value indicates which environmental values to make available to the kernel. (JUPYTER_GATEWAY_ENV_WHITELIST env var) GatewayClient.headers : Unicode Default: ``'{}'`` Additional HTTP headers to pass on the request. This value will be converted to a dict. (JUPYTER_GATEWAY_HEADERS env var) GatewayClient.http_pwd : Unicode Default: ``None`` The password for HTTP authentication. (JUPYTER_GATEWAY_HTTP_PWD env var) GatewayClient.http_user : Unicode Default: ``None`` The username for HTTP authentication. (JUPYTER_GATEWAY_HTTP_USER env var) GatewayClient.kernels_endpoint : Unicode Default: ``'/api/kernels'`` The gateway API endpoint for accessing kernel resources (JUPYTER_GATEWAY_KERNELS_ENDPOINT env var) GatewayClient.kernelspecs_endpoint : Unicode Default: ``'/api/kernelspecs'`` The gateway API endpoint for accessing kernelspecs (JUPYTER_GATEWAY_KERNELSPECS_ENDPOINT env var) GatewayClient.kernelspecs_resource_endpoint : Unicode Default: ``'/kernelspecs'`` The gateway endpoint for accessing kernelspecs resources (JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT env var) GatewayClient.request_timeout : Float Default: ``60.0`` The time allowed for HTTP request completion. (JUPYTER_GATEWAY_REQUEST_TIMEOUT env var) GatewayClient.url : Unicode Default: ``None`` The url of the Kernel or Enterprise Gateway server where kernel specifications are defined and kernel management takes place. If defined, this Notebook server acts as a proxy for all kernel management and kernel specification retrieval. 
    (JUPYTER_GATEWAY_URL env var)

GatewayClient.validate_cert : Bool
    Default: ``True``

    For HTTPS requests, determines if server's certificate should be
    validated or not. (JUPYTER_GATEWAY_VALIDATE_CERT env var)

GatewayClient.ws_url : Unicode
    Default: ``None``

    The websocket url of the Kernel or Enterprise Gateway server. If not
    provided, this value will correspond to the value of the Gateway url with
    'ws' in place of 'http'. (JUPYTER_GATEWAY_WS_URL env var)

jupyter_server-1.13.1/docs/source/other/index.rst000066400000000000000000000002031415445537200221060ustar00rootroot00000000000000
Other helpful documentation
---------------------------

.. toctree::
   :maxdepth: 1

   links
   faq
   full-config
   changelog

jupyter_server-1.13.1/docs/source/other/links.rst000066400000000000000000000006571415445537200221300ustar00rootroot00000000000000
List of helpful links
=====================

* :ref:`Frequently Asked Questions `
* `Jupyter Server Github Repo `_
* `JupyterLab Github Repo `_
* `Jupyter Notebook Github Repo `_
* `Jupyterhub Github Repo `_
* `Jupyter Zoom Channel `_

jupyter_server-1.13.1/docs/source/users/000077500000000000000000000000001415445537200202705ustar00rootroot00000000000000
jupyter_server-1.13.1/docs/source/users/configuration.rst000066400000000000000000000036161415445537200236770ustar00rootroot00000000000000
.. _user-configuring-a-jupyter-server:

Configuring a Jupyter Server
============================

Using a Jupyter config file
---------------------------

By default, Jupyter Server looks for server-specific configuration in a
``jupyter_server_config`` file located on a Jupyter path. To list the paths
where Jupyter Server will look, run:

.. code-block:: console

    $ jupyter --paths

    config:
        /Users/username/.jupyter
        /usr/local/etc/jupyter
        /etc/jupyter
    data:
        /Users/username/Library/Jupyter
        /usr/local/share/jupyter
        /usr/share/jupyter
    runtime:
        /Users/username/Library/Jupyter/runtime

The paths under ``config`` are listed in order of precedence. If the same
trait is listed in multiple places, it will be set to the value from the file
with the highest precedence.

Jupyter Server uses IPython's traitlets system for configuration. Traits can
be listed in a Python or JSON config file. To quickly create a
``jupyter_server_config.py`` file in the ``.jupyter`` directory, with all the
defaults commented out, use the following command:

.. code-block:: console

    $ jupyter server --generate-config

In Python files, these traits will have the prefix ``c.ServerApp``. For
example, your configuration file could look like:

.. code-block:: python

    # inside a jupyter_server_config.py file.

    c.ServerApp.port = 9999

The same configuration in JSON looks like:

.. code-block:: json

    {
        "ServerApp": {
            "port": 9999
        }
    }

Using the CLI
-------------

Alternatively, you can configure Jupyter Server when launching from the
command line using CLI args. Prefix each argument with ``--ServerApp`` like
so:

.. code-block:: console

    $ jupyter server --ServerApp.port=9999

Full configuration list
-----------------------

See the full list of configuration options for the server :ref:`here `.

jupyter_server-1.13.1/docs/source/users/help.rst000066400000000000000000000004311415445537200217500ustar00rootroot00000000000000
.. _user-getting-help:

Getting Help
============

If you run into any issues or bugs, please open an `issue on Github `_.

We'd also love to have you come by our :ref:`Team Meetings `.
jupyter_server-1.13.1/docs/source/users/index.rst000066400000000000000000000005721415445537200221350ustar00rootroot00000000000000
Documentation for Users
=======================

The Jupyter Server is a highly technical piece of the Jupyter Stack, so users
probably won't import or install this library directly. These pages are meant
to help you in case you run into issues or bugs.

.. toctree::
   :caption: Users
   :maxdepth: 1
   :name: users

   installation
   configuration
   launching
   help

jupyter_server-1.13.1/docs/source/users/installation.rst000066400000000000000000000011361415445537200235240ustar00rootroot00000000000000
.. _user-installation:

Installation
============

Most Jupyter users will **never need to install Jupyter Server manually**.
Jupyter Web applications will include the correct version of Jupyter Server
as a dependency. It's best to let those applications handle installation,
because they may require a specific version of Jupyter Server.

If you decide to install manually, run:

.. code-block:: bash

    pip install jupyter_server

You can upgrade or downgrade to a specific version of Jupyter Server by
adding an operator to the command above:

.. code-block:: bash

    pip install jupyter_server==1.0

jupyter_server-1.13.1/docs/source/users/launching.rst000066400000000000000000000033001415445537200227660ustar00rootroot00000000000000
.. _user-launching-a-bare-jupyter-server:

Launching a bare Jupyter Server
===============================

Most of the time, you won't need to start the Jupyter Server directly.
Jupyter Web Applications (like Jupyter Notebook, Jupyterlab, Voila, etc.)
come with their own entry points that start a server automatically.

Sometimes, though, it can be useful to start Jupyter Server directly when you
want to run multiple Jupyter Web applications at the same time. For more
details, see the :ref:`Managing multiple extensions ` page. If these
extensions are enabled, you can simply run the following:

.. code-block:: bash

    > jupyter server

    [I 2020-03-20 15:48:20.903 ServerApp] Serving notebooks from local directory: /Users/username/home
    [I 2020-03-20 15:48:20.903 ServerApp] Jupyter Server 1.0.0 is running at:
    [I 2020-03-20 15:48:20.903 ServerApp] http://localhost:8888/?token=<...>
    [I 2020-03-20 15:48:20.903 ServerApp] or http://127.0.0.1:8888/?token=<...>
    [I 2020-03-20 15:48:20.903 ServerApp] Use Control-C to stop this server and shut down all kernels (twice to skip confirmation).
    [I 2020-03-20 15:48:20.903 ServerApp] Welcome to Project Jupyter! Explore the various tools available and their corresponding documentation. If you are interested in contributing to the platform, please visit the community resources section at https://jupyter.org/community.html.
    [C 2020-03-20 15:48:20.907 ServerApp]
        To access the server, open this file in a browser:
            file:///Users/username/jpserver-###-open.html
        Or copy and paste one of these URLs:
            http://localhost:8888/?token=<...>
            or http://127.0.0.1:8888/?token=<...>

jupyter_server-1.13.1/examples/000077500000000000000000000000001415445537200165155ustar00rootroot00000000000000
jupyter_server-1.13.1/examples/simple/000077500000000000000000000000001415445537200200065ustar00rootroot00000000000000
jupyter_server-1.13.1/examples/simple/README.md000066400000000000000000000161641415445537200212750ustar00rootroot00000000000000
# Jupyter Server Simple Extension Example

This folder contains examples of simple extensions on top of Jupyter Server
and reviews configuration aspects.

## Install

You need `python3` to build and run the server extensions.
```bash
# Clone, create a conda env and install from source.
git clone https://github.com/jupyter/jupyter_server && \
  cd examples/simple && \
  conda create -y -n jupyter-server-example python=3.7 && \
  conda activate jupyter-server-example && \
  pip install -e .[test]
```

**OPTIONAL** If you want to build the Typescript code, you need [npm](https://www.npmjs.com) on your local environment. Compiled JavaScript is provided as an artifact in this repository, so this Typescript build step is optional. The Typescript source and configuration have been taken from https://github.com/markellekelly/jupyter-server-example.

```bash
npm install && \
  npm run build
```

## No Extension

Ensure Jupyter Server is starting without any extension enabled.

```bash
# Run this command from a shell.
jupyter server
```

Browse the default home page; it should show a white page in your browser with the following content: `A Jupyter Server is running.`

```bash
# Jupyter Server default Home Page.
open http://localhost:8888
```

## Extension 1

```bash
# Start the jupyter server activating simple_ext1 extension.
jupyter server --ServerApp.jpserver_extensions="{'simple_ext1': True}"
```

Now you can render `Extension 1` Server content in your browser.

```bash
# Home page as defined by default_url = '/default'.
open http://localhost:8888/simple_ext1/default
# HTML static page.
open http://localhost:8888/static/simple_ext1/home.html
open http://localhost:8888/static/simple_ext1/test.html
# Content from Handlers.
open http://localhost:8888/simple_ext1/params/test?var1=foo
# Content from Template.
open http://localhost:8888/simple_ext1/template1/test
# Content from Template with Typescript.
open http://localhost:8888/simple_ext1/typescript
# Error content.
open http://localhost:8888/simple_ext1/nope
# Redirect.
open http://localhost:8888/simple_ext1/redirect
# Favicon static content.
open http://localhost:8888/static/simple_ext1/favicon.ico
```

You can also start the server extension with Python modules.

```bash
python -m simple_ext1
```

To live reload the server as you change the extension, you can also enable [the `debug` mode for Tornado](https://www.tornadoweb.org/en/stable/guide/running.html#debug-mode-and-automatic-reloading):

```bash
jupyter server --ServerApp.jpserver_extensions="{'simple_ext1': True}" --ServerApp.tornado_settings="{'debug': True}"
```

## Extension 1 and Extension 2

The following command starts both the `simple_ext1` and `simple_ext2` extensions.

```bash
# Start the jupyter server, it will load both simple_ext1 and simple_ext2 based on the provided trait.
jupyter server --ServerApp.jpserver_extensions="{'simple_ext1': True, 'simple_ext2': True}"
```

Check that the previous `Extension 1` content is still available and that you can also render `Extension 2` Server content in your browser.

```bash
# HTML static page.
open http://localhost:8888/static/simple_ext2/test.html
# Content from Handlers.
open http://localhost:8888/simple_ext2/params/test?var1=foo
```

## Work with Entrypoints

Optionally, you can copy the `simple_ext1.json` and `simple_ext2.json` configuration files to your env `etc` folder and start only Extension 1, which will also start Extension 2.

```bash
pip uninstall -y jupyter_server_example && \
  python setup.py install && \
  cp -r ./etc $(dirname $(which jupyter))/..
```

```bash
# Start the jupyter server extension simple_ext1, it will also load simple_ext2 because of load_other_extensions = True.
# When you invoke with the entrypoint, the default url will be opened in your browser.
jupyter simple-ext1
```

## Configuration

Stop any running server (with `CTRL+C`) and start with additional configuration on the command line. The settings provided via the CLI will override the configuration that resides in the files (`jupyter_server_example1_config.py`...)

```bash
jupyter simple-ext1 --SimpleApp1.configA="ConfigA from command line"
```

Check the log: on startup, it should print the Config object. The content of the Config is based on the traits you have defined via the CLI and in `jupyter_server_example1_config.py`.

```
[SimpleApp1] Config {'SimpleApp1': {'configA': 'ConfigA from file', 'configB': 'ConfigB from file', 'configC': 'ConfigC from file'}}
[SimpleApp1] Config {'SimpleApp1': {'configA': 'ConfigA from file', 'configB': 'ConfigB from file', 'configC': 'ConfigC from file'}}
[SimpleApp2] WARNING | Config option `configD` not recognized by `SimpleApp2`. Did you mean one of: `configA, configB, configC`?
[SimpleApp2] Config {'SimpleApp2': {'configD': 'ConfigD from file'}}
[SimpleApp1] Config {'SimpleApp1': {'configA': 'ConfigA from command line', 'configB': 'ConfigB from file', 'configC': 'ConfigC from file'}}
```

## Only Extension 2

Now stop the server again and start with only `Extension 2`.

```bash
# Start the jupyter server extension simple_ext2, it will NOT load simple_ext1 because of load_other_extensions = False.
jupyter simple-ext2
```

Try the above links to check that only Extension 2 is responding (Extension 1 URLs should give you a 404 error).

## Extension 11 extends Extension 1

`Extension 11` extends `Extension 1` and brings a few more configs.

```bash
# TODO `--generate-config` returns an exception `"The ExtensionApp has not ServerApp "`
jupyter simple-ext11 --generate-config && vi ~/.jupyter/jupyter_config.py
```

The generated configuration should contain the following.

```bash
# TODO
```

The `hello`, `ignore_js` and `simple11_dir` are traits defined on the SimpleApp11 class. It also implements additional flags and aliases for these traits.

- The `--hello` flag will log on startup `Hello Simple11: You have launched with the --hello flag or defined 'c.SimpleApp11.hello = True' in your config file`
- The `ignore_js` flag
- The `--simple11-dir` alias will set the `SimpleApp11.simple11_dir` setting

Stop any running server and then start the simple-ext11.

```bash
jupyter simple-ext11 --hello --simple11-dir any_folder
# You can also launch with a module
python -m simple_ext11 --hello
# TODO FIX the following command, simple11 does not work launching with jpserver_extensions parameter.
jupyter server --ServerApp.jpserver_extensions="{'simple_ext11': True}" --hello --simple11-dir any_folder
```

Ensure the following URLs respond correctly.

```bash
# Jupyter Server Home Page.
open http://localhost:8888/
# TODO Fix Default URL, it does not show on startup.
# Home page as defined by default_url = '/default'.
open http://localhost:8888/simple_ext11/default
# HTML static page.
open http://localhost:8888/static/simple_ext11/test.html
# Content from Handlers.
open http://localhost:8888/simple_ext11/params/test?var1=foo
# Content from Template.
open http://localhost:8888/simple_ext11/template1/test
# Content from Template with Typescript.
open http://localhost:8888/simple_ext11/typescript
# Error content.
open http://localhost:8888/simple_ext11/nope
# Redirect.
open http://localhost:8888/simple_ext11/redirect
# Favicon static content.
open http://localhost:8888/static/simple_ext11/favicon.ico
```
jupyter_server-1.13.1/examples/simple/conftest.py000066400000000000000000000000461415445537200222050ustar00rootroot00000000000000from jupyter_server.conftest import * jupyter_server-1.13.1/examples/simple/etc/000077500000000000000000000000001415445537200205615ustar00rootroot00000000000000jupyter_server-1.13.1/examples/simple/etc/jupyter/000077500000000000000000000000001415445537200222635ustar00rootroot00000000000000jupyter_server-1.13.1/examples/simple/etc/jupyter/jupyter_server_config.d/000077500000000000000000000000001415445537200271225ustar00rootroot00000000000000jupyter_server-1.13.1/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext1.json000066400000000000000000000001261415445537200322460ustar00rootroot00000000000000{ "ServerApp": { "jpserver_extensions": { "simple_ext1": true } } } jupyter_server-1.13.1/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext11.json000066400000000000000000000001271415445537200323300ustar00rootroot00000000000000{ "ServerApp": { "jpserver_extensions": { "simple_ext11": true } } } jupyter_server-1.13.1/examples/simple/etc/jupyter/jupyter_server_config.d/simple_ext2.json000066400000000000000000000001261415445537200322470ustar00rootroot00000000000000{ "ServerApp": { "jpserver_extensions": { "simple_ext2": true } } } jupyter_server-1.13.1/examples/simple/jupyter_server_config.py000066400000000000000000000006211415445537200247740ustar00rootroot00000000000000# Configuration file for jupyter-server extensions. # ------------------------------------------------------------------------------ # Application(SingletonConfigurable) configuration # ------------------------------------------------------------------------------ ## The date format used by logging formatters for %(asctime)s c.Application.log_datefmt = "%Y-%m-%d %H:%M:%S Simple_Extensions_Example" jupyter_server-1.13.1/examples/simple/jupyter_simple_ext11_config.py000066400000000000000000000000371415445537200260020ustar00rootroot00000000000000c.SimpleApp11.ignore_js = True jupyter_server-1.13.1/examples/simple/jupyter_simple_ext1_config.py000066400000000000000000000002541415445537200257220ustar00rootroot00000000000000c.SimpleApp1.configA = "ConfigA from file" c.SimpleApp1.configB = "ConfigB from file" c.SimpleApp1.configC = "ConfigC from file" c.SimpleApp1.configD = "ConfigD from file" jupyter_server-1.13.1/examples/simple/jupyter_simple_ext2_config.py000066400000000000000000000000531415445537200257200ustar00rootroot00000000000000c.SimpleApp2.configD = "ConfigD from file" jupyter_server-1.13.1/examples/simple/package.json000066400000000000000000000006051415445537200222750ustar00rootroot00000000000000{ "name": "jupyter-server-example", "version": "0.0.1", "private": true, "scripts": { "build": "tsc -p src && webpack", "clean": "rimraf build", "prepublishOnly": "npm run build" }, "dependencies": {}, "devDependencies": { "rimraf": "2.0.3", "webpack": "~4.29.6", "webpack-cli": "^3.3.0", "whatwg-fetch": "~2.0.3", "typescript": "3.6.4" } } jupyter_server-1.13.1/examples/simple/pyproject.toml000066400000000000000000000001761415445537200227260ustar00rootroot00000000000000[build-system] requires = ["jupyter_packaging~=0.5.0", "setuptools>=40.8.0", "wheel"] build-backend = "setuptools.build_meta" jupyter_server-1.13.1/examples/simple/pytest.ini000066400000000000000000000000711415445537200220350ustar00rootroot00000000000000[pytest] # Disable any upper exclusion.
norecursedirs = jupyter_server-1.13.1/examples/simple/setup.py000066400000000000000000000030021415445537200215130ustar00rootroot00000000000000import os from jupyter_packaging import create_cmdclass from setuptools import setup VERSION = "0.0.1" def get_data_files(): """Get the data files for the package.""" data_files = [ ("etc/jupyter/jupyter_server_config.d", "etc/jupyter/jupyter_server_config.d/", "*.json"), ] def add_data_files(path): for (dirpath, dirnames, filenames) in os.walk(path): if filenames: paths = [(dirpath, dirpath, filename) for filename in filenames] data_files.extend(paths) # Add all static and templates folders. add_data_files("simple_ext1/static") add_data_files("simple_ext1/templates") add_data_files("simple_ext2/static") add_data_files("simple_ext2/templates") return data_files cmdclass = create_cmdclass(data_files_spec=get_data_files()) setup_args = dict( name="jupyter_server_example", version=VERSION, description="Jupyter Server Example", long_description=open("README.md").read(), python_requires=">=3.6", install_requires=[ "jupyter_server", "jinja2", ], extras_require={ "test": ["pytest"], }, include_package_data=True, cmdclass=cmdclass, entry_points={ "console_scripts": [ "jupyter-simple-ext1 = simple_ext1.application:main", "jupyter-simple-ext11 = simple_ext11.application:main", "jupyter-simple-ext2 = simple_ext2.application:main", ] }, ) if __name__ == "__main__": setup(**setup_args) jupyter_server-1.13.1/examples/simple/simple_ext1/000077500000000000000000000000001415445537200222405ustar00rootroot00000000000000jupyter_server-1.13.1/examples/simple/simple_ext1/__init__.py000066400000000000000000000002231415445537200243460ustar00rootroot00000000000000from .application import SimpleApp1 def _jupyter_server_extension_paths(): return [{"module": "simple_ext1.application", "app": SimpleApp1}] jupyter_server-1.13.1/examples/simple/simple_ext1/__main__.py000066400000000000000000000001051415445537200243260ustar00rootroot00000000000000from .application import main if __name__ == "__main__": main() jupyter_server-1.13.1/examples/simple/simple_ext1/application.py000066400000000000000000000041211415445537200251130ustar00rootroot00000000000000import os from traitlets import Unicode from .handlers import DefaultHandler from .handlers import ErrorHandler from .handlers import ParameterHandler from .handlers import RedirectHandler from .handlers import TemplateHandler from .handlers import TypescriptHandler from jupyter_server.extension.application import ExtensionApp from jupyter_server.extension.application import ExtensionAppJinjaMixin DEFAULT_STATIC_FILES_PATH = os.path.join(os.path.dirname(__file__), "static") DEFAULT_TEMPLATE_FILES_PATH = os.path.join(os.path.dirname(__file__), "templates") class SimpleApp1(ExtensionAppJinjaMixin, ExtensionApp): # The name of the extension. name = "simple_ext1" # The url that your extension will serve its homepage. extension_url = "/simple_ext1/default" # Should your extension expose other server extensions when launched directly? load_other_extensions = True # Local path to static files directory. static_paths = [DEFAULT_STATIC_FILES_PATH] # Local path to templates directory. 
template_paths = [DEFAULT_TEMPLATE_FILES_PATH]

    configA = Unicode("", config=True, help="Config A example.")
    configB = Unicode("", config=True, help="Config B example.")
    configC = Unicode("", config=True, help="Config C example.")

    def initialize_handlers(self):
        self.handlers.extend(
            [
                (r"/{}/default".format(self.name), DefaultHandler),
                (r"/{}/params/(.+)$".format(self.name), ParameterHandler),
                (r"/{}/template1/(.*)$".format(self.name), TemplateHandler),
                (r"/{}/redirect".format(self.name), RedirectHandler),
                (r"/{}/typescript/?".format(self.name), TypescriptHandler),
                (r"/{}/(.*)".format(self.name), ErrorHandler),
            ]
        )

    def initialize_settings(self):
        self.log.info("Config {}".format(self.config))


# -----------------------------------------------------------------------------
# Main entry point
# -----------------------------------------------------------------------------

main = launch_new_instance = SimpleApp1.launch_instance jupyter_server-1.13.1/examples/simple/simple_ext1/handlers.py000066400000000000000000000040641415445537200244160ustar00rootroot00000000000000from jupyter_server.base.handlers import JupyterHandler from jupyter_server.extension.handler import ExtensionHandlerJinjaMixin from jupyter_server.extension.handler import ExtensionHandlerMixin from jupyter_server.utils import url_escape class DefaultHandler(ExtensionHandlerMixin, JupyterHandler): def get(self): # The name of the extension to which this handler is linked. self.log.info("Extension Name in {} Default Handler: {}".format(self.name, self.name)) # A method for getting the url to static files (prefixed with /static/). self.log.info( "Static URL for / in simple_ext1 Default Handler: {}".format(self.static_url(path="/")) ) self.write("

Hello Simple 1 - I am the default...

") self.write("Config in {} Default Handler: {}".format(self.name, self.config)) class RedirectHandler(ExtensionHandlerMixin, JupyterHandler): def get(self): self.redirect("/static/{}/favicon.ico".format(self.name)) class ParameterHandler(ExtensionHandlerMixin, JupyterHandler): def get(self, matched_part=None, *args, **kwargs): var1 = self.get_argument("var1", default=None) components = [x for x in self.request.path.split("/") if x] self.write("

Hello Simple App 1 from Handler.

") self.write("

matched_part: {}

".format(url_escape(matched_part))) self.write("

var1: {}

".format(url_escape(var1))) self.write("

components: {}

".format(components)) class BaseTemplateHandler(ExtensionHandlerJinjaMixin, ExtensionHandlerMixin, JupyterHandler): pass class TypescriptHandler(BaseTemplateHandler): def get(self): self.write(self.render_template("typescript.html")) class TemplateHandler(BaseTemplateHandler): def get(self, path): """ Optionaly, you can print(self.get_template('simple1.html'))""" self.write(self.render_template("simple1.html", path=path)) class ErrorHandler(BaseTemplateHandler): def get(self, path): self.write(self.render_template("error.html", path=path)) jupyter_server-1.13.1/examples/simple/simple_ext1/static/000077500000000000000000000000001415445537200235275ustar00rootroot00000000000000jupyter_server-1.13.1/examples/simple/simple_ext1/static/bundle.js000066400000000000000000000117471415445537200253500ustar00rootroot00000000000000/******/ (function (modules) { // webpackBootstrap /******/ // The module cache /******/ var installedModules = {}; // The require function /******/ /******/ /******/ function __webpack_require__(moduleId) { /******/ /******/ // Check if module is in cache /******/ if (installedModules[moduleId]) { /******/ return installedModules[moduleId].exports; /******/ } // Create a new module (and put it into the cache) /******/ /******/ var module = (installedModules[moduleId] = { /******/ i: moduleId, /******/ l: false, /******/ exports: {}, /******/ }); // Execute the module function /******/ /******/ /******/ modules[moduleId].call( module.exports, module, module.exports, __webpack_require__ ); // Flag the module as loaded /******/ /******/ /******/ module.l = true; // Return the exports of the module /******/ /******/ /******/ return module.exports; /******/ } // expose the modules object (__webpack_modules__) /******/ /******/ /******/ /******/ __webpack_require__.m = modules; // expose the module cache /******/ /******/ /******/ __webpack_require__.c = installedModules; // define getter function for harmony exports /******/ /******/ /******/ __webpack_require__.d = function (exports, name, getter) { /******/ if (!__webpack_require__.o(exports, name)) { /******/ Object.defineProperty(exports, name, { enumerable: true, get: getter, }); /******/ } /******/ }; // define __esModule on exports /******/ /******/ /******/ __webpack_require__.r = function (exports) { /******/ if (typeof Symbol !== "undefined" && Symbol.toStringTag) { /******/ Object.defineProperty(exports, Symbol.toStringTag, { value: "Module", }); /******/ } /******/ Object.defineProperty(exports, "__esModule", { value: true }); /******/ }; // create a fake namespace object // mode & 1: value is a module id, require it // mode & 2: merge all properties of value into the ns // mode & 4: return value when already ns object // mode & 8|1: behave like require /******/ /******/ /******/ /******/ /******/ /******/ /******/ __webpack_require__.t = function ( value, mode ) { /******/ if (mode & 1) value = __webpack_require__(value); /******/ if (mode & 8) return value; /******/ if ( mode & 4 && typeof value === "object" && value && value.__esModule ) return value; /******/ var ns = Object.create(null); /******/ __webpack_require__.r(ns); /******/ Object.defineProperty(ns, "default", { enumerable: true, value: value, }); /******/ if (mode & 2 && typeof value != "string") for (var key in value) __webpack_require__.d( ns, key, function (key) { return value[key]; }.bind(null, key) ); /******/ return ns; /******/ }; // getDefaultExport function for compatibility with non-harmony modules /******/ /******/ /******/ 
__webpack_require__.n = function (module) { /******/ var getter = module && module.__esModule ? /******/ function getDefault() { return module["default"]; } : /******/ function getModuleExports() { return module; }; /******/ __webpack_require__.d(getter, "a", getter); /******/ return getter; /******/ }; // Object.prototype.hasOwnProperty.call /******/ /******/ /******/ __webpack_require__.o = function (object, property) { return Object.prototype.hasOwnProperty.call(object, property); }; // __webpack_public_path__ /******/ /******/ /******/ __webpack_require__.p = ""; // Load entry module and return exports /******/ /******/ /******/ /******/ return __webpack_require__((__webpack_require__.s = 0)); /******/ })( /************************************************************************/ /******/ { /***/ "./simple_ext1/static/index.js": /*!*************************************!*\ !*** ./simple_ext1/static/index.js ***! \*************************************/ /*! no static exports found */ /***/ function (module, exports) { eval( 'function main() {\n let div = document.getElementById("mydiv");\n div.innerText = "Hello from Typescript";\n}\nwindow.addEventListener(\'load\', main);\n\n\n//# sourceURL=webpack:///./simple_ext1/static/index.js?' ); /***/ }, /***/ 0: /*!*******************************************!*\ !*** multi ./simple_ext1/static/index.js ***! \*******************************************/ /*! no static exports found */ /***/ function (module, exports, __webpack_require__) { eval( 'module.exports = __webpack_require__(/*! ./simple_ext1/static/index.js */"./simple_ext1/static/index.js");\n\n\n//# sourceURL=webpack:///multi_./simple_ext1/static/index.js?' ); /***/ }, /******/ } ); jupyter_server-1.13.1/examples/simple/simple_ext1/static/favicon.ico000066400000000000000000000764461415445537200256650ustar00rootroot00000000000000 [binary icon data omitted] jupyter_server-1.13.1/examples/simple/simple_ext1/static/home.html000066400000000000000000000000541415445537200253440ustar00rootroot00000000000000

Welcome to Simple App 1 Home Page.

jupyter_server-1.13.1/examples/simple/simple_ext1/static/index.d.ts000066400000000000000000000000371415445537200254300ustar00rootroot00000000000000declare function main(): void; jupyter_server-1.13.1/examples/simple/simple_ext1/static/index.js000066400000000000000000000002241415445537200251720ustar00rootroot00000000000000function main() { let div = document.getElementById("mydiv"); div.innerText = "Hello from Typescript"; } window.addEventListener("load", main); jupyter_server-1.13.1/examples/simple/simple_ext1/static/test.html000066400000000000000000000000611415445537200253710ustar00rootroot00000000000000

Hello Simple App 1 from test HTML page.

jupyter_server-1.13.1/examples/simple/simple_ext1/static/tsconfig.tsbuildinfo000066400000000000000000000112261415445537200276110ustar00rootroot00000000000000{ "program": { "fileInfos": { "../../node_modules/typescript/lib/lib.es5.d.ts": { "version": "ff5688d6b2fcfef06842a395d7ff4d5730d45b724d4c48913118c889829052a1", "signature": "ff5688d6b2fcfef06842a395d7ff4d5730d45b724d4c48913118c889829052a1" }, "../../node_modules/typescript/lib/lib.es2015.d.ts": { "version": "7994d44005046d1413ea31d046577cdda33b8b2470f30281fd9c8b3c99fe2d96", "signature": "7994d44005046d1413ea31d046577cdda33b8b2470f30281fd9c8b3c99fe2d96" }, "../../node_modules/typescript/lib/lib.dom.d.ts": { "version": "2d53f3741e5a4f78a90f623387d71a1cc809bb258f10cdaec034b67cbf71022f", "signature": "2d53f3741e5a4f78a90f623387d71a1cc809bb258f10cdaec034b67cbf71022f" }, "../../node_modules/typescript/lib/lib.es2015.core.d.ts": { "version": "4ab19088d508f9e62bfc61c157e8a65b2afaefa251ecca315e7d20b5b97b256f", "signature": "4ab19088d508f9e62bfc61c157e8a65b2afaefa251ecca315e7d20b5b97b256f" }, "../../node_modules/typescript/lib/lib.es2015.collection.d.ts": { "version": "dd94d8ef48c562389eb58af8df3a3a34d11367f7c818192aa5f16470d469e3f0", "signature": "dd94d8ef48c562389eb58af8df3a3a34d11367f7c818192aa5f16470d469e3f0" }, "../../node_modules/typescript/lib/lib.es2015.generator.d.ts": { "version": "765e0e9c9d74cf4d031ca8b0bdb269a853e7d81eda6354c8510218d03db12122", "signature": "765e0e9c9d74cf4d031ca8b0bdb269a853e7d81eda6354c8510218d03db12122" }, "../../node_modules/typescript/lib/lib.es2015.iterable.d.ts": { "version": "285958e7699f1babd76d595830207f18d719662a0c30fac7baca7df7162a9210", "signature": "285958e7699f1babd76d595830207f18d719662a0c30fac7baca7df7162a9210" }, "../../node_modules/typescript/lib/lib.es2015.promise.d.ts": { "version": "e6b8ff2798f8ebd7a1c7afd8671f2cb67ee1901c422f5964d74b0b34c6574ea2", "signature": "e6b8ff2798f8ebd7a1c7afd8671f2cb67ee1901c422f5964d74b0b34c6574ea2" }, "../../node_modules/typescript/lib/lib.es2015.proxy.d.ts": { "version": "5e72f949a89717db444e3bd9433468890068bb21a5638d8ab15a1359e05e54fe", "signature": "5e72f949a89717db444e3bd9433468890068bb21a5638d8ab15a1359e05e54fe" }, "../../node_modules/typescript/lib/lib.es2015.reflect.d.ts": { "version": "f5b242136ae9bfb1cc99a5971cccc44e99947ae6b5ef6fd8aa54b5ade553b976", "signature": "f5b242136ae9bfb1cc99a5971cccc44e99947ae6b5ef6fd8aa54b5ade553b976" }, "../../node_modules/typescript/lib/lib.es2015.symbol.d.ts": { "version": "9ae2860252d6b5f16e2026d8a2c2069db7b2a3295e98b6031d01337b96437230", "signature": "9ae2860252d6b5f16e2026d8a2c2069db7b2a3295e98b6031d01337b96437230" }, "../../node_modules/typescript/lib/lib.es2015.symbol.wellknown.d.ts": { "version": "3e0a459888f32b42138d5a39f706ff2d55d500ab1031e0988b5568b0f67c2303", "signature": "3e0a459888f32b42138d5a39f706ff2d55d500ab1031e0988b5568b0f67c2303" }, "../../src/index.ts": { "version": "fd4f62325debd29128c1990caa4d546f2c48c21ea133fbcbb3e29f9fbef55e49", "signature": "ed4b087ea2a2e4a58647864cf512c7534210bfc2f9d236a2f9ed5245cf7a0896" } }, "options": { "outDir": "./", "allowSyntheticDefaultImports": true, "composite": true, "declaration": true, "noImplicitAny": true, "noEmitOnError": true, "noUnusedLocals": true, "esModuleInterop": true, "preserveWatchOutput": true, "module": 1, "moduleResolution": 2, "target": 2, "lib": [ "lib.dom.d.ts", "lib.es2015.d.ts" ], "jsx": 2, "types": [], "project": "../../src", "configFilePath": "../../src/tsconfig.json" }, "referencedMap": {}, "exportedModulesMap": {}, "semanticDiagnosticsPerFile": [ 
"../../node_modules/typescript/lib/lib.es5.d.ts", "../../node_modules/typescript/lib/lib.es2015.d.ts", "../../node_modules/typescript/lib/lib.dom.d.ts", "../../node_modules/typescript/lib/lib.es2015.core.d.ts", "../../node_modules/typescript/lib/lib.es2015.collection.d.ts", "../../node_modules/typescript/lib/lib.es2015.generator.d.ts", "../../node_modules/typescript/lib/lib.es2015.iterable.d.ts", "../../node_modules/typescript/lib/lib.es2015.promise.d.ts", "../../node_modules/typescript/lib/lib.es2015.proxy.d.ts", "../../node_modules/typescript/lib/lib.es2015.reflect.d.ts", "../../node_modules/typescript/lib/lib.es2015.symbol.d.ts", "../../node_modules/typescript/lib/lib.es2015.symbol.wellknown.d.ts", "../../src/index.ts" ] }, "version": "3.6.4" } jupyter_server-1.13.1/examples/simple/simple_ext1/templates/000077500000000000000000000000001415445537200242365ustar00rootroot00000000000000jupyter_server-1.13.1/examples/simple/simple_ext1/templates/error.html000066400000000000000000000007111415445537200262540ustar00rootroot00000000000000{% extends "page.html" %} {% block site %}
{% block h1_error %}

Error Page

{{status_code}} : {{status_message}}

{% endblock h1_error %} {% block error_detail %} {% if message %}

{% trans %}The error was:{% endtrans %}

{{message}}
{% endif %} {% endblock error_detail %}
{% endblock %} jupyter_server-1.13.1/examples/simple/simple_ext1/templates/page.html000066400000000000000000000011171415445537200260400ustar00rootroot00000000000000 {% block title %}Jupyter Server 1{% endblock %} {% block favicon %}{% endblock %} {% block meta %} {% endblock %}
{% block site %} {% endblock site %}
{% block after_site %} {% endblock after_site %} jupyter_server-1.13.1/examples/simple/simple_ext1/templates/simple1.html000066400000000000000000000007451415445537200265040ustar00rootroot00000000000000

Hello Simple App 1 from Template.

Path: {{path}}
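The `{{path}}` placeholder in `simple1.html` above is filled in by `TemplateHandler` through Jinja2. A minimal standalone sketch of the same substitution, using plain `jinja2` (already an install requirement of this example) rather than the server machinery:

```python
# Standalone illustration of what render_template("simple1.html", path=path) does.
from jinja2 import Template

template = Template("Hello Simple App 1 from Template. Path: {{path}}")
print(template.render(path="test"))  # -> Hello Simple App 1 from Template. Path: test
```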

jupyter_server-1.13.1/examples/simple/simple_ext1/templates/typescript.html000066400000000000000000000010621415445537200273310ustar00rootroot00000000000000

Hello world!

jupyter_server-1.13.1/examples/simple/simple_ext11/000077500000000000000000000000001415445537200223215ustar00rootroot00000000000000jupyter_server-1.13.1/examples/simple/simple_ext11/__init__.py000066400000000000000000000002261415445537200244320ustar00rootroot00000000000000from .application import SimpleApp11 def _jupyter_server_extension_paths(): return [{"module": "simple_ext11.application", "app": SimpleApp11}] jupyter_server-1.13.1/examples/simple/simple_ext11/__main__.py000066400000000000000000000001051415445537200244070ustar00rootroot00000000000000from .application import main if __name__ == "__main__": main() jupyter_server-1.13.1/examples/simple/simple_ext11/application.py000066400000000000000000000042641415445537200252040ustar00rootroot00000000000000import os

from simple_ext1.application import SimpleApp1
from traitlets import Bool
from traitlets import observe
from traitlets import Unicode

from jupyter_server.serverapp import aliases
from jupyter_server.serverapp import flags

DEFAULT_STATIC_FILES_PATH = os.path.join(os.path.dirname(__file__), "./../simple_ext1/static")
DEFAULT_TEMPLATE_FILES_PATH = os.path.join(os.path.dirname(__file__), "./../simple_ext1/templates")


class SimpleApp11(SimpleApp1):
    flags["hello"] = ({"SimpleApp11": {"hello": True}}, "Say hello on startup.")
    aliases.update(
        {
            "simple11-dir": "SimpleApp11.simple11_dir",
        }
    )

    # The name of the extension.
    name = "simple_ext11"

    # The url that your extension will serve its homepage.
    extension_url = "/simple_ext11/default"

    # Local path to static files directory.
    static_paths = [DEFAULT_STATIC_FILES_PATH]

    # Local path to templates directory.
    template_paths = [DEFAULT_TEMPLATE_FILES_PATH]

    simple11_dir = Unicode("", config=True, help="Simple directory")

    hello = Bool(
        False,
        config=True,
        help="Say hello",
    )

    ignore_js = Bool(
        False,
        config=True,
        help="Ignore Javascript",
    )

    @observe("ignore_js")
    def _update_ignore_js(self, change):
        """TODO Does the observe work?"""
        self.log.info("ignore_js has just changed: {}".format(change))

    @property
    def simple11_dir_formatted(self):
        return "/" + self.simple11_dir

    def initialize_settings(self):
        self.log.info("hello: {}".format(self.hello))
        if self.hello:
            self.log.info(
                "Hello Simple11: You have launched with the --hello flag or defined 'c.SimpleApp11.hello = True' in your config file"
            )
        self.log.info("ignore_js: {}".format(self.ignore_js))
        super().initialize_settings()

    def initialize_handlers(self):
        super().initialize_handlers()


# -----------------------------------------------------------------------------
# Main entry point
# -----------------------------------------------------------------------------

main = launch_new_instance = SimpleApp11.launch_instance jupyter_server-1.13.1/examples/simple/simple_ext2/000077500000000000000000000000001415445537200222415ustar00rootroot00000000000000jupyter_server-1.13.1/examples/simple/simple_ext2/__init__.py000066400000000000000000000002421415445537200243500ustar00rootroot00000000000000from .application import SimpleApp2 def _jupyter_server_extension_paths(): return [ {"module": "simple_ext2.application", "app": SimpleApp2}, ] jupyter_server-1.13.1/examples/simple/simple_ext2/__main__.py000066400000000000000000000001051415445537200243270ustar00rootroot00000000000000from .application import main if __name__ == "__main__": main() jupyter_server-1.13.1/examples/simple/simple_ext2/application.py000066400000000000000000000033011415445537200251130ustar00rootroot00000000000000import os

from traitlets import Unicode

from .handlers import ErrorHandler
from .handlers import IndexHandler
from .handlers import ParameterHandler
from .handlers import TemplateHandler

from jupyter_server.extension.application import ExtensionApp
from jupyter_server.extension.application import ExtensionAppJinjaMixin

DEFAULT_STATIC_FILES_PATH = os.path.join(os.path.dirname(__file__), "static")
DEFAULT_TEMPLATE_FILES_PATH = os.path.join(os.path.dirname(__file__), "templates")


class SimpleApp2(ExtensionAppJinjaMixin, ExtensionApp):
    # The name of the extension.
    name = "simple_ext2"

    # The url that your extension will serve its homepage.
    extension_url = "/simple_ext2"

    # Should your extension expose other server extensions when launched directly?
    load_other_extensions = True

    # Local path to static files directory.
    static_paths = [DEFAULT_STATIC_FILES_PATH]

    # Local path to templates directory.
    template_paths = [DEFAULT_TEMPLATE_FILES_PATH]

    configD = Unicode("", config=True, help="Config D example.")

    def initialize_handlers(self):
        self.handlers.extend(
            [
                (r"/simple_ext2/params/(.+)$", ParameterHandler),
                (r"/simple_ext2/template", TemplateHandler),
                (r"/simple_ext2/?", IndexHandler),
                (r"/simple_ext2/(.*)", ErrorHandler),
            ]
        )

    def initialize_settings(self):
        self.log.info("Config {}".format(self.config))


# -----------------------------------------------------------------------------
# Main entry point
# -----------------------------------------------------------------------------

main = launch_new_instance = SimpleApp2.launch_instance jupyter_server-1.13.1/examples/simple/simple_ext2/handlers.py000066400000000000000000000024141415445537200244140ustar00rootroot00000000000000from jupyter_server.base.handlers import JupyterHandler from jupyter_server.extension.handler import ExtensionHandlerJinjaMixin from jupyter_server.extension.handler import ExtensionHandlerMixin from jupyter_server.utils import url_escape class ParameterHandler(ExtensionHandlerMixin, JupyterHandler): def get(self, matched_part=None, *args, **kwargs): var1 = self.get_argument("var1", default=None) components = [x for x in self.request.path.split("/") if x] self.write("

Hello Simple App 2 from Handler.

") self.write("

matched_part: {}

".format(url_escape(matched_part))) self.write("

var1: {}

".format(url_escape(var1))) self.write("

components: {}

".format(components)) class BaseTemplateHandler(ExtensionHandlerJinjaMixin, ExtensionHandlerMixin, JupyterHandler): pass class IndexHandler(BaseTemplateHandler): def get(self): self.write(self.render_template("index.html")) class TemplateHandler(BaseTemplateHandler): def get(self, path): print(self.get_template("simple_ext2.html")) self.write(self.render_template("simple_ext2.html", path=path)) class ErrorHandler(BaseTemplateHandler): def get(self, path): self.write(self.render_template("error.html")) jupyter_server-1.13.1/examples/simple/simple_ext2/static/000077500000000000000000000000001415445537200235305ustar00rootroot00000000000000jupyter_server-1.13.1/examples/simple/simple_ext2/static/favicon.ico000066400000000000000000000764461415445537200256720ustar00rootroot00000000000000 hF 00 %V@@ (B:(  @&w&wW&ww&ww&wW&w&w%&w&w&w&w&w&w&w&w&w%&wO&w&w&w&w&w&w&w&w&w&w&wO&wA&w&w&wW'w&x&wW&w&w&wA'y &w&w)&w)&w'y 'x&z'x'x'x'x&z'x'y &w&w)&w)&w'y &wA&w&w&wW&w&x&wW&w&w&wA&wO&w&w&w&w&w&w&w&w&w&w&wO&w%&w&w&w&w&w&w&w&w&w%&w&wW&ww&ww&wW'w( @ (w &xW&w&w&w&w&w&w&w&w&xW(w 'y#&w&w&w&w&w&w&w&w&w&w&w&w&w&w'y#+y &w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w+y 'x!&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w'x!'y1&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w'y1(x%&w&w&w&w&w&w'wM'z'z&wM&w&w&w&w&w&w(x%'| &w&w&w&w'w)(w)&w&w&w&w'| &w&w&w(x&z&w&w&w'y%&w'x5(x5&w'y%'xg&z'x'xe+x +x +x +x 'xe'x&z'xg'y%&w(x5'x5&w'y%&w&w&w'x(x&w&w&w'| &w&w&w&w(x''w)&w&w&w&w'| (x%&w&w&w&w&w&w&wM'z&{'wM&w&w&w&w&w&w(x%'y1&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w'y1'x!&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w'x!+y &w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w+y 'y#&w&w&w&w&w&w&w&w&w&w&w&w&w&w'y#(w &xW&w&w&w&w&w&w&w&w'wU(w (0` %+z 'x/'xO&wa&wm%wy%wy&wm&wa'xO'x/+z (w &wE&w&w&w&w&w&w&w&w&w&w&w&w&w&x&wE(w )z 'y3&xy&w&w&w&w&w&w&w&w&w&w%w&w&w%w&w&w%w&w&xy'y3)z )y!&w&w&w&v&w%w&v&w%w&v&w%w%v&w%w%v&w%w%v&w%w&v&w&w)y!+y &xY&w%v%v&w%w%v&w%w%v&w%w%v&w&v&w&w&v&w&w&v&w&w&v&w&v&w&xY+y 'x&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w%w&w&w%w&w&w%w&w&w%w&w&w&w&w'x.'y/&w&v&v&w&v&w%w&v&w%w&v&w%w&v&w%w%v&w%w%v&w%w%v&w%w%v&w%w%v&w%w&w'y/.'z%&w&w&w&w&w%w%v&w%w%v&w&w&w&w&w&w&w&w&w&w&w&w%v&v&w&w&v&w&w&v&w&w&w&w'z'(w&w&w&w&w&w&w&w&w&w'w'wI'|(z'xG&w}&w&w&v&w%w&w&w%w&w'w&x&w&v&v&w%v&v&w'x_'w+(w*}(x+&w]&w&v&w%v&w%w&w&w'|'wu&w&w&w&w'ws'w/'w)w(w-&wo&v&w&w&w'wu'|&vI&w&w&w&xk(x&z &wi&w&w&v&wI& &w%w'x'y+&w)'w&w&w& 'yM&w&wi*z+x'wg&w'yM)}&x&y)'x)&w)}(x#(x+(x+(x#+x+x+x+w*w+x+x+x(x#(x+(x+(x#)}&w'x)&z)&x)}'yM&w'wg+x)x&wg&w'yM& &w&w'w&w)'y+&w&w&w% &vG&w&v&w'wi'x 'x&xk&w&w&v&wI'|&wu&w&w&w&w&wo(x-)y'w&x/&ws&w&w&w&w'wu'|'x&w%w&w&w&w&w&w&w](x)*}(w'w+'x_&w&w&v&w&v&w&w&w(x&w%w&w&w%w&w&w&w&w&v}'xG(y&}&xI'v&w&w&w&w&v&w&w&v&w(x'z'&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w'z'.'y/&w%w&w&w&w&w&v&w&w&v&w&w&v&w&w&v&w&v&w&w&v&w&w&v&w&w&v&w&w&v&w&w'y/-'x&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&v&w&w&v&w&w&v&w&w&v&w&w&w&w'x+y &xY&w&w&w%w%w&w%w%w&w%w%w&w%w&w&v&w&w&v&w&w&v&w&w&v%v&w&xY+y )z!&w&w&w%w&w&w%w&w&w%w&w&w&w&w%w&w&w%w&w&w%w%w&w&w)z!)z 'y3&wy&w&w%w%w&w%w%w&w%w&w&w&w&w&w&w&w&w&w&wy&y3)z (w &wE&w&w&w&w&w&w&w&w%w&w&w&w&w'w'wE(w +z 'x/'xO&va&wm&wy&wy&wm&wa'xO'x/+z (@ B3 (xA'x{'x'w&w&x&w&w&x&w'w'x'x{(xA3 (w-&x'x'x&w&w&w&w&w&w&w&w&w&w&w&w&w&w'x'x&x(w-. 'yc&x&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&x'yc. 
3 'y'w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w'w'y3 )zY&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w)zY+y+&x&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&x+y+(yS'x&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w'x(yS'x&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w'x. 'x&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w'x. 3'x&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&x&x'x'w&x(yu'wi'wi(yu&x'x'w'x&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w'x3'x&w&w&w&w&w&w&w&w&w&w&w'w'w}'xC')z'zA(w{&x'x&w&w&w&w&w&w&w&w&w&w'x(x&w&w&w&w&w&w&w&w'x'x}(w-*}+(w{&w&w&w&w&w&w&w&w&w(x'xi&w&w&w&w&w&w&w'w}(y'+x%(w{'w&w&w&w&w&w&w'xi'|!'w&w&w&w&w'x(w[@@&xW&w&w&w&w&w'w'|!'w&w&w&w&x(xG&zC'w&w&w&w'w'wi&w&w'x'yo@U&wk'w&w&w'wi&'x&w&x.1y'x&w'x&(y&x&x'x}&x(y. &x&zI'xI&w. (ye)xE)xE(ye+x%+x%+x%+x%(ye)xE)xE(ye. &w'xI&zI&x. (y&x'x}&x&x(y&'x&w'x1y,z&x&w'x&'wi&w&w'w&wkU@'yo'x&w&w'wi'w&w&w&w'w'xC(xG&x&w&w&w'w'|!'w&w&w&w&w&w&xWU3(w['x&w&w&w&w'w'|!'xi&w&w&w&w&w&w'w(xy,|#(y''w}&w&w&w&w&w&w&w'xi(x&w&w&w&w&w&w&w&w&x(w{*}+(w-'x}'x&w&w&w&w&w&w&w&w(x'x&w&w&w&w&w&w&w&w&w&w'w&w(w{'zA)z'&zC'w}'w&w&w&w&w&w&w&w&w&w&w&w'x3'x&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w'x'w'x&x(yu'wi'wi(yu&x'w'x&x&x&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w'x3. 'x&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w'x. 'x&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w'x(yS'x&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w'x(yS+y+&x&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&x+y+)zY&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w)zY3 'y'w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&x'y3 . 'xc&x&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&x'xc. (w-&x'x'x&w&w&w&w&w&w&w&w&w&w&w&w&w&w'x'x'w(w-3 (xA'x{'x'w&w&x&w&w&x&w'w'x'x{(xA3 jupyter_server-1.13.1/examples/simple/simple_ext2/static/test.html000066400000000000000000000000611415445537200253720ustar00rootroot00000000000000

Hello Simple App 2 from test HTML page.

jupyter_server-1.13.1/examples/simple/simple_ext2/templates/000077500000000000000000000000001415445537200242375ustar00rootroot00000000000000jupyter_server-1.13.1/examples/simple/simple_ext2/templates/error.html000066400000000000000000000006611415445537200262610ustar00rootroot00000000000000{% extends "page.html" %} {% block site %}
{% block h1_error %}

{{status_code}} : {{status_message}}

{% endblock h1_error %} {% block error_detail %} {% if message %}

{% trans %}The error was:{% endtrans %}

{{message}}
{% endif %} {% endblock error_detail %}
{% endblock %} jupyter_server-1.13.1/examples/simple/simple_ext2/templates/index.html000066400000000000000000000000671415445537200262370ustar00rootroot00000000000000

Hello Extension 2 from HTML Index Static Page

jupyter_server-1.13.1/examples/simple/simple_ext2/templates/page.html000066400000000000000000000011171415445537200260410ustar00rootroot00000000000000 {% block title %}Jupyter Server 1{% endblock %} {% block favicon %}{% endblock %} {% block meta %} {% endblock %}
{% block site %} {% endblock site %}
{% block after_site %} {% endblock after_site %} jupyter_server-1.13.1/examples/simple/simple_ext2/templates/simple_ext2.html000066400000000000000000000000701415445537200273550ustar00rootroot00000000000000

Hello Extension 2 from Simple HTML Static Page
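The simple_ext2 handlers and templates above can be exercised the same way as simple_ext1's (see `tests/test_handlers.py` further down). A hedged sketch of such a test, hypothetical and not part of the repository, reusing the same `jp_server_config` and `jp_fetch` pytest fixtures:

```python
import pytest


@pytest.fixture
def jp_server_config():
    # Enable only the simple_ext2 extension for this test run.
    return {"ServerApp": {"jpserver_extensions": {"simple_ext2": True}}}


async def test_index(jp_fetch):
    # IndexHandler is routed at /simple_ext2/? and renders index.html.
    r = await jp_fetch("simple_ext2", method="GET")
    assert r.code == 200
```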

jupyter_server-1.13.1/examples/simple/src/000077500000000000000000000000001415445537200205755ustar00rootroot00000000000000jupyter_server-1.13.1/examples/simple/src/index.ts000066400000000000000000000002251415445537200222530ustar00rootroot00000000000000function main() { let div = document.getElementById("mydiv"); div.innerText = "Hello from Typescript"; } window.addEventListener("load", main); jupyter_server-1.13.1/examples/simple/src/tsconfig.json000066400000000000000000000007301415445537200233040ustar00rootroot00000000000000{ "compilerOptions": { "outDir": "../simple_ext1/static", "allowSyntheticDefaultImports": true, "composite": true, "declaration": true, "noImplicitAny": true, "noEmitOnError": true, "noUnusedLocals": true, "esModuleInterop": true, "preserveWatchOutput": true, "module": "commonjs", "moduleResolution": "node", "target": "es2015", "lib": ["dom", "es2015"], "jsx": "react", "types": [] }, "include": ["*"] } jupyter_server-1.13.1/examples/simple/tests/000077500000000000000000000000001415445537200211505ustar00rootroot00000000000000jupyter_server-1.13.1/examples/simple/tests/test_handlers.py000066400000000000000000000010221415445537200243540ustar00rootroot00000000000000import pytest @pytest.fixture def jp_server_config(jp_template_dir): return { "ServerApp": {"jpserver_extensions": {"simple_ext1": True}}, } async def test_handler_default(jp_fetch): r = await jp_fetch("simple_ext1/default", method="GET") assert r.code == 200 print(r.body.decode()) assert r.body.decode().index("Hello Simple 1 - I am the default...") > -1 async def test_handler_template(jp_fetch): r = await jp_fetch("simple_ext1/template1/test", method="GET") assert r.code == 200 jupyter_server-1.13.1/examples/simple/webpack.config.js000066400000000000000000000003111415445537200232170ustar00rootroot00000000000000module.exports = { entry: ["./simple_ext1/static/index.js"], output: { path: require("path").join(__dirname, "simple_ext1", "static"), filename: "bundle.js", }, mode: "development", }; jupyter_server-1.13.1/jupyter_server/000077500000000000000000000000001415445537200177675ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/__init__.py000066400000000000000000000012541415445537200221020ustar00rootroot00000000000000"""The Jupyter Server""" import os import subprocess import sys DEFAULT_STATIC_FILES_PATH = os.path.join(os.path.dirname(__file__), "static") DEFAULT_TEMPLATE_PATH_LIST = [ os.path.dirname(__file__), os.path.join(os.path.dirname(__file__), "templates"), ] DEFAULT_JUPYTER_SERVER_PORT = 8888 del os from ._version import version_info, __version__ def _cleanup(): pass # patch subprocess on Windows for python<3.7 # see https://bugs.python.org/issue37380 # the fix for python3.7: https://github.com/python/cpython/pull/15706/files if sys.platform == "win32": if sys.version_info < (3, 7): subprocess._cleanup = _cleanup subprocess._active = None jupyter_server-1.13.1/jupyter_server/__main__.py000066400000000000000000000001521415445537200220570ustar00rootroot00000000000000if __name__ == "__main__": from jupyter_server import serverapp as app app.launch_new_instance() jupyter_server-1.13.1/jupyter_server/_sysinfo.py000066400000000000000000000047701415445537200222020ustar00rootroot00000000000000# encoding: utf-8 """ Utilities for getting information about Jupyter and the system it's running in. """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
import os
import platform
import subprocess
import sys

from ipython_genutils import encoding

import jupyter_server


def pkg_commit_hash(pkg_path):
    """Get short form of commit hash given directory `pkg_path`

    We get the commit hash from git if it's a repo.

    If this fails, we return a not-found placeholder tuple

    Parameters
    ----------
    pkg_path : str
        directory containing package
        only used for getting commit from active repo

    Returns
    -------
    hash_from : str
        Where we got the hash from - description
    hash_str : str
        short form of hash
    """
    # maybe we are in a repository, check for a .git folder
    p = os.path
    cur_path = None
    par_path = pkg_path
    while cur_path != par_path:
        cur_path = par_path
        if p.exists(p.join(cur_path, ".git")):
            try:
                proc = subprocess.Popen(
                    ["git", "rev-parse", "--short", "HEAD"],
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    cwd=pkg_path,
                )
                repo_commit, _ = proc.communicate()
            except OSError:
                repo_commit = None

            if repo_commit:
                return "repository", repo_commit.strip().decode("ascii")
            else:
                return u"", u""
        par_path = p.dirname(par_path)

    return u"", u""


def pkg_info(pkg_path):
    """Return dict describing the context of this package

    Parameters
    ----------
    pkg_path : str
        path containing __init__.py for package

    Returns
    -------
    context : dict
        with named parameters of interest
    """
    src, hsh = pkg_commit_hash(pkg_path)
    return dict(
        jupyter_server_version=jupyter_server.__version__,
        jupyter_server_path=pkg_path,
        commit_source=src,
        commit_hash=hsh,
        sys_version=sys.version,
        sys_executable=sys.executable,
        sys_platform=sys.platform,
        platform=platform.platform(),
        os_name=os.name,
        default_encoding=encoding.DEFAULT_ENCODING,
    )


def get_sys_info():
    """Return useful information about the system as a dict."""
    p = os.path
    path = p.realpath(p.dirname(p.abspath(p.join(jupyter_server.__file__))))
    return pkg_info(path) jupyter_server-1.13.1/jupyter_server/_tz.py000066400000000000000000000017431415445537200211400ustar00rootroot00000000000000# encoding: utf-8 """ Timezone utilities Just UTC-awareness right now """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. from datetime import datetime from datetime import timedelta from datetime import tzinfo # constant for zero offset ZERO = timedelta(0) class tzUTC(tzinfo): """tzinfo object for UTC (zero offset)""" def utcoffset(self, d): return ZERO def dst(self, d): return ZERO UTC = tzUTC() def utc_aware(unaware): """decorator for adding UTC tzinfo to datetime's utcfoo methods""" def utc_method(*args, **kwargs): dt = unaware(*args, **kwargs) return dt.replace(tzinfo=UTC) return utc_method utcfromtimestamp = utc_aware(datetime.utcfromtimestamp) utcnow = utc_aware(datetime.utcnow) def isoformat(dt): """Return iso-formatted timestamp Like .isoformat(), but uses Z for UTC instead of +00:00 """ return dt.isoformat().replace("+00:00", "Z") jupyter_server-1.13.1/jupyter_server/_version.py000066400000000000000000000002501415445537200221620ustar00rootroot00000000000000""" store the current version info of the server.
""" version_info = (1, 13, 1, "", "") __version__ = ".".join(map(str, version_info[:3])) + "".join(version_info[3:]) jupyter_server-1.13.1/jupyter_server/auth/000077500000000000000000000000001415445537200207305ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/auth/__init__.py000066400000000000000000000000351415445537200230370ustar00rootroot00000000000000from .security import passwd jupyter_server-1.13.1/jupyter_server/auth/__main__.py000066400000000000000000000032441415445537200230250ustar00rootroot00000000000000import argparse import sys from getpass import getpass from jupyter_core.paths import jupyter_config_dir from jupyter_server.auth import passwd from jupyter_server.config_manager import BaseJSONConfigManager def set_password(args): password = args.password while not password: password1 = getpass("" if args.quiet else "Provide password: ") password_repeat = getpass("" if args.quiet else "Repeat password: ") if password1 != password_repeat: print("Passwords do not match, try again") elif len(password1) < 4: print("Please provide at least 4 characters") else: password = password1 password_hash = passwd(password) cfg = BaseJSONConfigManager(config_dir=jupyter_config_dir()) cfg.update( "jupyter_server_config", { "ServerApp": { "password": password_hash, } }, ) if not args.quiet: print("password stored in config dir: %s" % jupyter_config_dir()) def main(argv): parser = argparse.ArgumentParser(argv[0]) subparsers = parser.add_subparsers() parser_password = subparsers.add_parser( "password", help="sets a password for your jupyter server" ) parser_password.add_argument( "password", help="password to set, if not given, a password will be queried for (NOTE: this may not be safe)", nargs="?", ) parser_password.add_argument("--quiet", help="suppress messages", action="store_true") parser_password.set_defaults(function=set_password) args = parser.parse_args(argv[1:]) args.function(args) if __name__ == "__main__": main(sys.argv) jupyter_server-1.13.1/jupyter_server/auth/login.py000066400000000000000000000235651415445537200224250ustar00rootroot00000000000000"""Tornado handlers for logging into the Jupyter Server.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import os import re import uuid from urllib.parse import urlparse from tornado.escape import url_escape from ..base.handlers import JupyterHandler from .security import passwd_check from .security import set_password class LoginHandler(JupyterHandler): """The basic tornado login handler authenticates with a hashed password from the configuration. """ def _render(self, message=None): self.write( self.render_template( "login.html", next=url_escape(self.get_argument("next", default=self.base_url)), message=message, ) ) def _redirect_safe(self, url, default=None): """Redirect if url is on our PATH Full-domain redirects are allowed if they pass our CORS origin checks. Otherwise use default (self.base_url if unspecified). """ if default is None: default = self.base_url # protect chrome users from mishandling unescaped backslashes. 
# \ is not valid in urls, but some browsers treat it as / # instead of %5C, causing `\\` to behave as `//` url = url.replace("\\", "%5C") parsed = urlparse(url) if parsed.netloc or not (parsed.path + "/").startswith(self.base_url): # require that next_url be absolute path within our path allow = False # OR pass our cross-origin check if parsed.netloc: # if full URL, run our cross-origin check: origin = "%s://%s" % (parsed.scheme, parsed.netloc) origin = origin.lower() if self.allow_origin: allow = self.allow_origin == origin elif self.allow_origin_pat: allow = bool(re.match(self.allow_origin_pat, origin)) if not allow: # not allowed, use default self.log.warning("Not allowing login redirect to %r" % url) url = default self.redirect(url) def get(self): if self.current_user: next_url = self.get_argument("next", default=self.base_url) self._redirect_safe(next_url) else: self._render() @property def hashed_password(self): return self.password_from_settings(self.settings) def passwd_check(self, a, b): return passwd_check(a, b) def post(self): typed_password = self.get_argument("password", default=u"") new_password = self.get_argument("new_password", default=u"") if self.get_login_available(self.settings): if self.passwd_check(self.hashed_password, typed_password) and not new_password: self.set_login_cookie(self, uuid.uuid4().hex) elif self.token and self.token == typed_password: self.set_login_cookie(self, uuid.uuid4().hex) if new_password and self.settings.get("allow_password_change"): config_dir = self.settings.get("config_dir") config_file = os.path.join(config_dir, "jupyter_server_config.json") set_password(new_password, config_file=config_file) self.log.info("Wrote hashed password to %s" % config_file) else: self.set_status(401) self._render(message={"error": "Invalid credentials"}) return next_url = self.get_argument("next", default=self.base_url) self._redirect_safe(next_url) @classmethod def set_login_cookie(cls, handler, user_id=None): """Call this on handlers to set the login cookie for success""" cookie_options = handler.settings.get("cookie_options", {}) cookie_options.setdefault("httponly", True) # tornado <4.2 has a bug that considers secure==True as soon as # 'secure' kwarg is passed to set_secure_cookie if handler.settings.get("secure_cookie", handler.request.protocol == "https"): cookie_options.setdefault("secure", True) cookie_options.setdefault("path", handler.base_url) handler.set_secure_cookie(handler.cookie_name, user_id, **cookie_options) return user_id auth_header_pat = re.compile(r"token\s+(.+)", re.IGNORECASE) @classmethod def get_token(cls, handler): """Get the user token from a request Default: - in URL parameters: ?token= - in header: Authorization: token """ user_token = handler.get_argument("token", "") if not user_token: # get it from Authorization header m = cls.auth_header_pat.match(handler.request.headers.get("Authorization", "")) if m: user_token = m.group(1) return user_token @classmethod def should_check_origin(cls, handler): """Should the Handler check for CORS origin validation? Origin check should be skipped for token-authenticated requests. Returns: - True, if Handler must check for valid CORS origin. - False, if Handler should skip origin check since requests are token-authenticated. """ return not cls.is_token_authenticated(handler) @classmethod def is_token_authenticated(cls, handler): """Returns True if handler has been token authenticated. Otherwise, False. 
Login with a token is used to signal certain things, such as: - permit access to REST API - xsrf protection - skip origin-checks for scripts """ if getattr(handler, "_user_id", None) is None: # ensure get_user has been called, so we know if we're token-authenticated handler.get_current_user() return getattr(handler, "_token_authenticated", False) @classmethod def get_user(cls, handler): """Called by handlers.get_current_user for identifying the current user. See tornado.web.RequestHandler.get_current_user for details. """ # Can't call this get_current_user because it will collide when # called on LoginHandler itself. if getattr(handler, "_user_id", None): return handler._user_id user_id = cls.get_user_token(handler) if user_id is None: get_secure_cookie_kwargs = handler.settings.get("get_secure_cookie_kwargs", {}) user_id = handler.get_secure_cookie(handler.cookie_name, **get_secure_cookie_kwargs) if user_id: user_id = user_id.decode() else: cls.set_login_cookie(handler, user_id) # Record that the current request has been authenticated with a token. # Used in is_token_authenticated above. handler._token_authenticated = True if user_id is None: # If an invalid cookie was sent, clear it to prevent unnecessary # extra warnings. But don't do this on a request with *no* cookie, # because that can erroneously log you out (see gh-3365) if handler.get_cookie(handler.cookie_name) is not None: handler.log.warning("Clearing invalid/expired login cookie %s", handler.cookie_name) handler.clear_login_cookie() if not handler.login_available: # Completely insecure! No authentication at all. # No need to warn here, though; validate_security will have already done that. user_id = "anonymous" # cache value for future retrievals on the same request handler._user_id = user_id return user_id @classmethod def get_user_token(cls, handler): """Identify the user based on a token in the URL or Authorization header Returns: - uuid if authenticated - None if not """ token = handler.token if not token: return # check login token from URL argument or Authorization header user_token = cls.get_token(handler) authenticated = False if user_token == token: # token-authenticated, set the login cookie handler.log.debug( "Accepting token-authenticated connection from %s", handler.request.remote_ip ) authenticated = True if authenticated: return uuid.uuid4().hex else: return None @classmethod def validate_security(cls, app, ssl_options=None): """Check the application's security. Show messages, or abort if necessary, based on the security configuration. """ if not app.ip: warning = "WARNING: The Jupyter server is listening on all IP addresses" if ssl_options is None: app.log.warning(warning + " and not using encryption. This " "is not recommended.") if not app.password and not app.token: app.log.warning( warning + " and not using authentication. " "This is highly insecure and not recommended." ) else: if not app.password and not app.token: app.log.warning( "All authentication is disabled." " Anyone who can connect to this server will be able to run code." ) @classmethod def password_from_settings(cls, settings): """Return the hashed password from the tornado settings. If there is no configured password, an empty string will be returned. 
""" return settings.get("password", u"") @classmethod def get_login_available(cls, settings): """Whether this LoginHandler is needed - and therefore whether the login page should be displayed.""" return bool(cls.password_from_settings(settings) or settings.get("token")) jupyter_server-1.13.1/jupyter_server/auth/logout.py000066400000000000000000000011641415445537200226150ustar00rootroot00000000000000"""Tornado handlers for logging out of the Jupyter Server. """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. from ..base.handlers import JupyterHandler class LogoutHandler(JupyterHandler): def get(self): self.clear_login_cookie() if self.login_available: message = {"info": "Successfully logged out."} else: message = {"warning": "Cannot log out. Jupyter Server authentication " "is disabled."} self.write(self.render_template("logout.html", message=message)) default_handlers = [(r"/logout", LogoutHandler)] jupyter_server-1.13.1/jupyter_server/auth/security.py000066400000000000000000000114751415445537200231610ustar00rootroot00000000000000""" Password generation for the Jupyter Server. """ import getpass import hashlib import io import json import os import random import traceback import warnings from contextlib import contextmanager from ipython_genutils.py3compat import cast_bytes from ipython_genutils.py3compat import cast_unicode from ipython_genutils.py3compat import str_to_bytes from jupyter_core.paths import jupyter_config_dir from traitlets.config import Config from traitlets.config import ConfigFileNotFound from traitlets.config import JSONFileConfigLoader # Length of the salt in nr of hex chars, which implies salt_len * 4 # bits of randomness. salt_len = 12 def passwd(passphrase=None, algorithm="argon2"): """Generate hashed password and salt for use in server configuration. In the server configuration, set `c.ServerApp.password` to the generated string. Parameters ---------- passphrase : str Password to hash. If unspecified, the user is asked to input and verify a password. algorithm : str Hashing algorithm to use (e.g, 'sha1' or any argument supported by :func:`hashlib.new`, or 'argon2'). Returns ------- hashed_passphrase : str Hashed password, in the format 'hash_algorithm:salt:passphrase_hash'. Examples -------- >>> passwd('mypassword') # doctest: +ELLIPSIS 'argon2:...' """ if passphrase is None: for i in range(3): p0 = getpass.getpass("Enter password: ") p1 = getpass.getpass("Verify password: ") if p0 == p1: passphrase = p0 break else: print("Passwords do not match.") else: raise ValueError("No matching passwords found. Giving up.") if algorithm == "argon2": import argon2 ph = argon2.PasswordHasher( memory_cost=10240, time_cost=10, parallelism=8, ) h = ph.hash(passphrase) return ":".join((algorithm, cast_unicode(h, "ascii"))) h = hashlib.new(algorithm) salt = ("%0" + str(salt_len) + "x") % random.getrandbits(4 * salt_len) h.update(cast_bytes(passphrase, "utf-8") + str_to_bytes(salt, "ascii")) return ":".join((algorithm, salt, h.hexdigest())) def passwd_check(hashed_passphrase, passphrase): """Verify that a given passphrase matches its hashed version. Parameters ---------- hashed_passphrase : str Hashed password, in the format returned by `passwd`. passphrase : str Passphrase to validate. Returns ------- valid : bool True if the passphrase matches the hash. 
Examples -------- >>> myhash = passwd('mypassword') >>> passwd_check(myhash, 'mypassword') True >>> passwd_check(myhash, 'otherpassword') False >>> passwd_check('sha1:0e112c3ddfce:a68df677475c2b47b6e86d0467eec97ac5f4b85a', ... 'mypassword') True """ if hashed_passphrase.startswith("argon2:"): import argon2 import argon2.exceptions ph = argon2.PasswordHasher() try: return ph.verify(hashed_passphrase[7:], passphrase) except argon2.exceptions.VerificationError: return False try: algorithm, salt, pw_digest = hashed_passphrase.split(":", 2) except (ValueError, TypeError): return False try: h = hashlib.new(algorithm) except ValueError: return False if len(pw_digest) == 0: return False h.update(cast_bytes(passphrase, "utf-8") + cast_bytes(salt, "ascii")) return h.hexdigest() == pw_digest @contextmanager def persist_config(config_file=None, mode=0o600): """Context manager that can be used to modify a config object On exit of the context manager, the config will be written back to disk, by default with user-only (600) permissions. """ if config_file is None: config_file = os.path.join(jupyter_config_dir(), "jupyter_server_config.json") os.makedirs(os.path.dirname(config_file), exist_ok=True) loader = JSONFileConfigLoader(os.path.basename(config_file), os.path.dirname(config_file)) try: config = loader.load_config() except ConfigFileNotFound: config = Config() yield config with io.open(config_file, "w", encoding="utf8") as f: f.write(cast_unicode(json.dumps(config, indent=2))) try: os.chmod(config_file, mode) except Exception as e: tb = traceback.format_exc() warnings.warn("Failed to set permissions on %s:\n%s" % (config_file, tb), RuntimeWarning) def set_password(password=None, config_file=None): """Ask user for password, store it in JSON configuration file""" hashed_password = passwd(password) with persist_config(config_file) as config: config.ServerApp.password = hashed_password jupyter_server-1.13.1/jupyter_server/base/000077500000000000000000000000001415445537200207015ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/base/__init__.py000066400000000000000000000000001415445537200230000ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/base/handlers.py000066400000000000000000001034011415445537200230520ustar00rootroot00000000000000"""Base Tornado handlers for the Jupyter server.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
import datetime import functools import ipaddress import json import mimetypes import os import re import traceback import types import warnings from http.client import responses from http.cookies import Morsel from urllib.parse import urlparse import prometheus_client from ipython_genutils.path import filefind from jinja2 import TemplateNotFound from jupyter_core.paths import is_hidden from tornado import escape from tornado import httputil from tornado import web from tornado.log import app_log from traitlets.config import Application import jupyter_server from jupyter_server._sysinfo import get_sys_info from jupyter_server._tz import utcnow from jupyter_server.i18n import combine_translations from jupyter_server.services.security import csp_report_uri from jupyter_server.utils import ensure_async from jupyter_server.utils import url_escape from jupyter_server.utils import url_is_absolute from jupyter_server.utils import url_path_join from jupyter_server.utils import urldecode_unix_socket_path # ----------------------------------------------------------------------------- # Top-level handlers # ----------------------------------------------------------------------------- non_alphanum = re.compile(r"[^A-Za-z0-9]") _sys_info_cache = None def json_sys_info(): global _sys_info_cache if _sys_info_cache is None: _sys_info_cache = json.dumps(get_sys_info()) return _sys_info_cache def log(): if Application.initialized(): return Application.instance().log else: return app_log class AuthenticatedHandler(web.RequestHandler): """A RequestHandler with an authenticated user.""" @property def content_security_policy(self): """The default Content-Security-Policy header Can be overridden by defining Content-Security-Policy in settings['headers'] """ if "Content-Security-Policy" in self.settings.get("headers", {}): # user-specified, don't override return self.settings["headers"]["Content-Security-Policy"] return "; ".join( [ "frame-ancestors 'self'", # Make sure the report-uri is relative to the base_url "report-uri " + self.settings.get("csp_report_uri", url_path_join(self.base_url, csp_report_uri)), ] ) def set_default_headers(self): headers = {} headers["X-Content-Type-Options"] = "nosniff" headers.update(self.settings.get("headers", {})) headers["Content-Security-Policy"] = self.content_security_policy # Allow for overriding headers for header_name, value in headers.items(): try: self.set_header(header_name, value) except Exception as e: # tornado raise Exception (not a subclass) # if method is unsupported (websocket and Access-Control-Allow-Origin # for example, so just ignore) self.log.debug(e) def force_clear_cookie(self, name, path="/", domain=None): """Deletes the cookie with the given name. Tornado's cookie handling currently (Jan 2018) stores cookies in a dict keyed by name, so it can only modify one cookie with a given name per response. The browser can store multiple cookies with the same name but different domains and/or paths. This method lets us clear multiple cookies with the same name. Due to limitations of the cookie protocol, you must pass the same path and domain to clear a cookie as were used when that cookie was set (but there is no way to find out on the server side which values were used for a given cookie). 
""" name = escape.native_str(name) expires = datetime.datetime.utcnow() - datetime.timedelta(days=365) morsel = Morsel() morsel.set(name, "", '""') morsel["expires"] = httputil.format_timestamp(expires) morsel["path"] = path if domain: morsel["domain"] = domain self.add_header("Set-Cookie", morsel.OutputString()) def clear_login_cookie(self): cookie_options = self.settings.get("cookie_options", {}) path = cookie_options.setdefault("path", self.base_url) self.clear_cookie(self.cookie_name, path=path) if path and path != "/": # also clear cookie on / to ensure old cookies are cleared # after the change in path behavior. # N.B. This bypasses the normal cookie handling, which can't update # two cookies with the same name. See the method above. self.force_clear_cookie(self.cookie_name) def get_current_user(self): if self.login_handler is None: return "anonymous" return self.login_handler.get_user(self) def skip_check_origin(self): """Ask my login_handler if I should skip the origin_check For example: in the default LoginHandler, if a request is token-authenticated, origin checking should be skipped. """ if self.request.method == "OPTIONS": # no origin-check on options requests, which are used to check origins! return True if self.login_handler is None or not hasattr(self.login_handler, "should_check_origin"): return False return not self.login_handler.should_check_origin(self) @property def token_authenticated(self): """Have I been authenticated with a token?""" if self.login_handler is None or not hasattr(self.login_handler, "is_token_authenticated"): return False return self.login_handler.is_token_authenticated(self) @property def cookie_name(self): default_cookie_name = non_alphanum.sub("-", "username-{}".format(self.request.host)) return self.settings.get("cookie_name", default_cookie_name) @property def logged_in(self): """Is a user currently logged in?""" user = self.get_current_user() return user and not user == "anonymous" @property def login_handler(self): """Return the login handler for this application, if any.""" return self.settings.get("login_handler_class", None) @property def token(self): """Return the login token for this application, if any.""" return self.settings.get("token", None) @property def login_available(self): """May a user proceed to log in? This returns True if login capability is available, irrespective of whether the user is already logged in or not. """ if self.login_handler is None: return False return bool(self.login_handler.get_login_available(self.settings)) class JupyterHandler(AuthenticatedHandler): """Jupyter-specific extensions to authenticated handling Mostly property shortcuts to Jupyter-specific settings. 
""" @property def config(self): return self.settings.get("config", None) @property def log(self): """use the Jupyter log by default, falling back on tornado's logger""" return log() @property def jinja_template_vars(self): """User-supplied values to supply to jinja templates.""" return self.settings.get("jinja_template_vars", {}) @property def serverapp(self): return self.settings["serverapp"] # --------------------------------------------------------------- # URLs # --------------------------------------------------------------- @property def version_hash(self): """The version hash to use for cache hints for static files""" return self.settings.get("version_hash", "") @property def mathjax_url(self): url = self.settings.get("mathjax_url", "") if not url or url_is_absolute(url): return url return url_path_join(self.base_url, url) @property def mathjax_config(self): return self.settings.get("mathjax_config", "TeX-AMS-MML_HTMLorMML-full,Safe") @property def base_url(self): return self.settings.get("base_url", "/") @property def default_url(self): return self.settings.get("default_url", "") @property def ws_url(self): return self.settings.get("websocket_url", "") @property def contents_js_source(self): self.log.debug( "Using contents: %s", self.settings.get("contents_js_source", "services/contents") ) return self.settings.get("contents_js_source", "services/contents") # --------------------------------------------------------------- # Manager objects # --------------------------------------------------------------- @property def kernel_manager(self): return self.settings["kernel_manager"] @property def contents_manager(self): return self.settings["contents_manager"] @property def session_manager(self): return self.settings["session_manager"] @property def terminal_manager(self): return self.settings["terminal_manager"] @property def kernel_spec_manager(self): return self.settings["kernel_spec_manager"] @property def config_manager(self): return self.settings["config_manager"] # --------------------------------------------------------------- # CORS # --------------------------------------------------------------- @property def allow_origin(self): """Normal Access-Control-Allow-Origin""" return self.settings.get("allow_origin", "") @property def allow_origin_pat(self): """Regular expression version of allow_origin""" return self.settings.get("allow_origin_pat", None) @property def allow_credentials(self): """Whether to set Access-Control-Allow-Credentials""" return self.settings.get("allow_credentials", False) def set_default_headers(self): """Add CORS headers, if defined""" super(JupyterHandler, self).set_default_headers() if self.allow_origin: self.set_header("Access-Control-Allow-Origin", self.allow_origin) elif self.allow_origin_pat: origin = self.get_origin() if origin and re.match(self.allow_origin_pat, origin): self.set_header("Access-Control-Allow-Origin", origin) elif self.token_authenticated and "Access-Control-Allow-Origin" not in self.settings.get( "headers", {} ): # allow token-authenticated requests cross-origin by default. # only apply this exception if allow-origin has not been specified. 
self.set_header("Access-Control-Allow-Origin", self.request.headers.get("Origin", "")) if self.allow_credentials: self.set_header("Access-Control-Allow-Credentials", "true") def set_attachment_header(self, filename): """Set Content-Disposition: attachment header As a method to ensure handling of filename encoding """ escaped_filename = url_escape(filename) self.set_header( "Content-Disposition", "attachment;" " filename*=utf-8''{utf8}".format( utf8=escaped_filename, ), ) def get_origin(self): # Handle WebSocket Origin naming convention differences # The difference between version 8 and 13 is that in 8 the # client sends a "Sec-Websocket-Origin" header and in 13 it's # simply "Origin". if "Origin" in self.request.headers: origin = self.request.headers.get("Origin") else: origin = self.request.headers.get("Sec-Websocket-Origin", None) return origin # origin_to_satisfy_tornado is present because tornado requires # check_origin to take an origin argument, but we don't use it def check_origin(self, origin_to_satisfy_tornado=""): """Check Origin for cross-site API requests, including websockets Copied from WebSocket with changes: - allow unspecified host/origin (e.g. scripts) - allow token-authenticated requests """ if self.allow_origin == "*" or self.skip_check_origin(): return True host = self.request.headers.get("Host") origin = self.request.headers.get("Origin") # If no header is provided, let the request through. # Origin can be None for: # - same-origin (IE, Firefox) # - Cross-site POST form (IE, Firefox) # - Scripts # The cross-site POST (XSRF) case is handled by tornado's xsrf_token if origin is None or host is None: return True origin = origin.lower() origin_host = urlparse(origin).netloc # OK if origin matches host if origin_host == host: return True # Check CORS headers if self.allow_origin: allow = self.allow_origin == origin elif self.allow_origin_pat: allow = bool(re.match(self.allow_origin_pat, origin)) else: # No CORS headers deny the request allow = False if not allow: self.log.warning( "Blocking Cross Origin API request for %s. Origin: %s, Host: %s", self.request.path, origin, host, ) return allow def check_referer(self): """Check Referer for cross-site requests. Disables requests to certain endpoints with external or missing Referer. If set, allow_origin settings are applied to the Referer to whitelist specific cross-origin sites. Used on GET for api endpoints and /files/ to block cross-site inclusion (XSSI). """ if self.allow_origin == "*" or self.skip_check_origin(): return True host = self.request.headers.get("Host") referer = self.request.headers.get("Referer") if not host: self.log.warning("Blocking request with no host") return False if not referer: self.log.warning("Blocking request with no referer") return False referer_url = urlparse(referer) referer_host = referer_url.netloc if referer_host == host: return True # apply cross-origin checks to Referer: origin = "{}://{}".format(referer_url.scheme, referer_url.netloc) if self.allow_origin: allow = self.allow_origin == origin elif self.allow_origin_pat: allow = bool(re.match(self.allow_origin_pat, origin)) else: # No CORS settings, deny the request allow = False if not allow: self.log.warning( "Blocking Cross Origin request for %s. 
Referer: %s, Host: %s", self.request.path, origin, host, ) return allow def check_xsrf_cookie(self): """Bypass xsrf cookie checks when token-authenticated""" if self.token_authenticated or self.settings.get("disable_check_xsrf", False): # Token-authenticated requests do not need additional XSRF-check # Servers without authentication are vulnerable to XSRF return try: return super(JupyterHandler, self).check_xsrf_cookie() except web.HTTPError as e: if self.request.method in {"GET", "HEAD"}: # Consider Referer a sufficient cross-origin check for GET requests if not self.check_referer(): referer = self.request.headers.get("Referer") if referer: msg = "Blocking Cross Origin request from {}.".format(referer) else: msg = "Blocking request from unknown origin" raise web.HTTPError(403, msg) else: raise def check_host(self): """Check the host header if remote access disallowed. Returns True if the request should continue, False otherwise. """ if self.settings.get("allow_remote_access", False): return True # Remove port (e.g. ':8888') from host host = re.match(r"^(.*?)(:\d+)?$", self.request.host).group(1) # Browsers format IPv6 addresses like [::1]; we need to remove the [] if host.startswith("[") and host.endswith("]"): host = host[1:-1] # UNIX socket handling check_host = urldecode_unix_socket_path(host) if check_host.startswith("/") and os.path.exists(check_host): allow = True else: try: addr = ipaddress.ip_address(host) except ValueError: # Not an IP address: check against hostnames allow = host in self.settings.get("local_hostnames", ["localhost"]) else: allow = addr.is_loopback if not allow: self.log.warning( ( "Blocking request with non-local 'Host' %s (%s). " "If the server should be accessible at that name, " "set ServerApp.allow_remote_access to disable the check." ), host, self.request.host, ) return allow def prepare(self): if not self.check_host(): raise web.HTTPError(403) return super(JupyterHandler, self).prepare() # --------------------------------------------------------------- # template rendering # --------------------------------------------------------------- def get_template(self, name): """Return the jinja template object for a given name""" return self.settings["jinja2_env"].get_template(name) def render_template(self, name, **ns): ns.update(self.template_namespace) template = self.get_template(name) return template.render(**ns) @property def template_namespace(self): return dict( base_url=self.base_url, default_url=self.default_url, ws_url=self.ws_url, logged_in=self.logged_in, allow_password_change=self.settings.get("allow_password_change"), login_available=self.login_available, token_available=bool(self.token), static_url=self.static_url, sys_info=json_sys_info(), contents_js_source=self.contents_js_source, version_hash=self.version_hash, xsrf_form_html=self.xsrf_form_html, token=self.token, xsrf_token=self.xsrf_token.decode("utf8"), nbjs_translations=json.dumps( combine_translations(self.request.headers.get("Accept-Language", "")) ), **self.jinja_template_vars ) def get_json_body(self): """Return the body of the request as JSON data.""" if not self.request.body: return None # Do we need to call body.decode('utf-8') here? 
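# (note: request.body is bytes in tornado, so it is decoded explicitly before json.loads rather than relying on the json library to sniff the encoding)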
body = self.request.body.strip().decode(u"utf-8") try: model = json.loads(body) except Exception as e: self.log.debug("Bad JSON: %r", body) self.log.error("Couldn't parse JSON", exc_info=True) raise web.HTTPError(400, u"Invalid JSON in body of request") from e return model def write_error(self, status_code, **kwargs): """render custom error pages""" exc_info = kwargs.get("exc_info") message = "" status_message = responses.get(status_code, "Unknown HTTP Error") exception = "(unknown)" if exc_info: exception = exc_info[1] # get the custom message, if defined try: message = exception.log_message % exception.args except Exception: pass # construct the custom reason, if defined reason = getattr(exception, "reason", "") if reason: status_message = reason # build template namespace ns = dict( status_code=status_code, status_message=status_message, message=message, exception=exception, ) self.set_header("Content-Type", "text/html") # render the template try: html = self.render_template("%s.html" % status_code, **ns) except TemplateNotFound: html = self.render_template("error.html", **ns) self.write(html) class APIHandler(JupyterHandler): """Base class for API handlers""" def prepare(self): if not self.check_origin(): raise web.HTTPError(404) return super(APIHandler, self).prepare() def write_error(self, status_code, **kwargs): """APIHandler errors are JSON, not human pages""" self.set_header("Content-Type", "application/json") message = responses.get(status_code, "Unknown HTTP Error") reply = { "message": message, } exc_info = kwargs.get("exc_info") if exc_info: e = exc_info[1] if isinstance(e, HTTPError): reply["message"] = e.log_message or message reply["reason"] = e.reason else: reply["message"] = "Unhandled error" reply["reason"] = None reply["traceback"] = "".join(traceback.format_exception(*exc_info)) self.log.warning(reply["message"]) self.finish(json.dumps(reply)) def get_current_user(self): """Raise 403 on API handlers instead of redirecting to human login page""" # preserve _user_cache so we don't raise more than once if hasattr(self, "_user_cache"): return self._user_cache self._user_cache = user = super(APIHandler, self).get_current_user() return user def get_login_url(self): # if get_login_url is invoked in an API handler, # that means @web.authenticated is trying to trigger a redirect. # instead of redirecting, raise 403 instead. 
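# (note: API clients then receive the JSON error body produced by write_error above, which is more useful to a program than a redirect to an HTML login page)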
if not self.current_user: raise web.HTTPError(403) return super(APIHandler, self).get_login_url() @property def content_security_policy(self): csp = "; ".join( [ super(APIHandler, self).content_security_policy, "default-src 'none'", ] ) return csp # set _track_activity = False on API handlers that shouldn't track activity _track_activity = True def update_api_activity(self): """Update last_activity of API requests""" # record activity of authenticated requests if ( self._track_activity and getattr(self, "_user_cache", None) and self.get_argument("no_track_activity", None) is None ): self.settings["api_last_activity"] = utcnow() def finish(self, *args, **kwargs): self.update_api_activity() self.set_header("Content-Type", "application/json") return super(APIHandler, self).finish(*args, **kwargs) def options(self, *args, **kwargs): if "Access-Control-Allow-Headers" in self.settings.get("headers", {}): self.set_header( "Access-Control-Allow-Headers", self.settings["headers"]["Access-Control-Allow-Headers"], ) else: self.set_header( "Access-Control-Allow-Headers", "accept, content-type, authorization, x-xsrftoken" ) self.set_header("Access-Control-Allow-Methods", "GET, PUT, POST, PATCH, DELETE, OPTIONS") # if authorization header is requested, # that means the request is token-authenticated. # avoid browser-side rejection of the preflight request. # only allow this exception if allow_origin has not been specified # and Jupyter server authentication is enabled. # If the token is not valid, the 'real' request will still be rejected. requested_headers = self.request.headers.get("Access-Control-Request-Headers", "").split( "," ) if ( requested_headers and any(h.strip().lower() == "authorization" for h in requested_headers) and ( # FIXME: it would be even better to check specifically for token-auth, # but there is currently no API for this. self.login_available ) and ( self.allow_origin or self.allow_origin_pat or "Access-Control-Allow-Origin" in self.settings.get("headers", {}) ) ): self.set_header("Access-Control-Allow-Origin", self.request.headers.get("Origin", "")) class Template404(JupyterHandler): """Render our 404 template""" def prepare(self): raise web.HTTPError(404) class AuthenticatedFileHandler(JupyterHandler, web.StaticFileHandler): """static files should only be accessible when logged in""" @property def content_security_policy(self): # In case we're serving HTML/SVG, confine any Javascript to a unique # origin so it can't interact with the Jupyter server. 
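# (note: the CSP "sandbox" directive gives served documents a unique opaque origin, so "allow-scripts" lets their scripts run without being able to read this server's cookies or call its API as the user)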
return ( super(AuthenticatedFileHandler, self).content_security_policy + "; sandbox allow-scripts" ) @web.authenticated def head(self, path): self.check_xsrf_cookie() return super(AuthenticatedFileHandler, self).head(path) @web.authenticated def get(self, path): if os.path.splitext(path)[1] == ".ipynb" or self.get_argument("download", False): name = path.rsplit("/", 1)[-1] self.set_attachment_header(name) return web.StaticFileHandler.get(self, path) def get_content_type(self): path = self.absolute_path.strip("/") if "/" in path: _, name = path.rsplit("/", 1) else: name = path if name.endswith(".ipynb"): return "application/x-ipynb+json" else: cur_mime = mimetypes.guess_type(name)[0] if cur_mime == "text/plain": return "text/plain; charset=UTF-8" else: return super(AuthenticatedFileHandler, self).get_content_type() def set_headers(self): super(AuthenticatedFileHandler, self).set_headers() # disable browser caching, rely on 304 replies for savings if "v" not in self.request.arguments: self.add_header("Cache-Control", "no-cache") def compute_etag(self): return None def validate_absolute_path(self, root, absolute_path): """Validate and return the absolute path. Requires tornado 3.1 Adding to tornado's own handling, forbids the serving of hidden files. """ abs_path = super(AuthenticatedFileHandler, self).validate_absolute_path(root, absolute_path) abs_root = os.path.abspath(root) if is_hidden(abs_path, abs_root) and not self.contents_manager.allow_hidden: self.log.info( "Refusing to serve hidden file, via 404 Error, use flag 'ContentsManager.allow_hidden' to enable" ) raise web.HTTPError(404) return abs_path def json_errors(method): """Decorate methods with this to return GitHub style JSON errors. This should be used on any JSON API on any handler method that can raise HTTPErrors. This will grab the latest HTTPError exception using sys.exc_info and then: 1. Set the HTTP status code based on the HTTPError 2. Create and return a JSON body with a message field describing the error in a human readable form. """ warnings.warn( "@json_errors is deprecated in notebook 5.2.0. 
Subclass APIHandler instead.", DeprecationWarning, stacklevel=2, ) @functools.wraps(method) def wrapper(self, *args, **kwargs): self.write_error = types.MethodType(APIHandler.write_error, self) return method(self, *args, **kwargs) return wrapper # ----------------------------------------------------------------------------- # File handler # ----------------------------------------------------------------------------- # to minimize subclass changes: HTTPError = web.HTTPError class FileFindHandler(JupyterHandler, web.StaticFileHandler): """subclass of StaticFileHandler for serving files from a search path""" # cache search results, don't search for files more than once _static_paths = {} def set_headers(self): super(FileFindHandler, self).set_headers() # disable browser caching, rely on 304 replies for savings if "v" not in self.request.arguments or any( self.request.path.startswith(path) for path in self.no_cache_paths ): self.set_header("Cache-Control", "no-cache") def initialize(self, path, default_filename=None, no_cache_paths=None): self.no_cache_paths = no_cache_paths or [] if isinstance(path, str): path = [path] self.root = tuple(os.path.abspath(os.path.expanduser(p)) + os.sep for p in path) self.default_filename = default_filename def compute_etag(self): return None @classmethod def get_absolute_path(cls, roots, path): """locate a file to serve on our static file search path""" with cls._lock: if path in cls._static_paths: return cls._static_paths[path] try: abspath = os.path.abspath(filefind(path, roots)) except IOError: # IOError means not found return "" cls._static_paths[path] = abspath log().debug("Path %s served from %s" % (path, abspath)) return abspath def validate_absolute_path(self, root, absolute_path): """check if the file should be served (raises 404, 403, etc.)""" if absolute_path == "": raise web.HTTPError(404) for root in self.root: if (absolute_path + os.sep).startswith(root): break return super(FileFindHandler, self).validate_absolute_path(root, absolute_path) class APIVersionHandler(APIHandler): def get(self): # not authenticated, so give as few info as possible self.finish(json.dumps({"version": jupyter_server.__version__})) class TrailingSlashHandler(web.RequestHandler): """Simple redirect handler that strips trailing slashes This should be the first, highest priority handler. """ def get(self): path, *rest = self.request.uri.partition("?") # trim trailing *and* leading / # to avoid misinterpreting repeated '//' path = "/" + path.strip("/") new_uri = "".join([path, *rest]) self.redirect(new_uri) post = put = get class MainHandler(JupyterHandler): """Simple handler for base_url.""" def get(self): html = self.render_template("main.html") self.write(html) post = put = get class FilesRedirectHandler(JupyterHandler): """Handler for redirecting relative URLs to the /files/ handler""" @staticmethod async def redirect_to_files(self, path): """make redirect logic a reusable static method so it can be called from other handlers. 
""" cm = self.contents_manager if await ensure_async(cm.dir_exists(path)): # it's a *directory*, redirect to /tree url = url_path_join(self.base_url, "tree", url_escape(path)) else: orig_path = path # otherwise, redirect to /files parts = path.split("/") if not await ensure_async(cm.file_exists(path=path)) and "files" in parts: # redirect without files/ iff it would 404 # this preserves pre-2.0-style 'files/' links self.log.warning("Deprecated files/ URL: %s", orig_path) parts.remove("files") path = "/".join(parts) if not await ensure_async(cm.file_exists(path=path)): raise web.HTTPError(404) url = url_path_join(self.base_url, "files", url_escape(path)) self.log.debug("Redirecting %s to %s", self.request.path, url) self.redirect(url) def get(self, path=""): return self.redirect_to_files(self, path) class RedirectWithParams(web.RequestHandler): """Sam as web.RedirectHandler, but preserves URL parameters""" def initialize(self, url, permanent=True): self._url = url self._permanent = permanent def get(self): sep = "&" if "?" in self._url else "?" url = sep.join([self._url, self.request.query]) self.redirect(url, permanent=self._permanent) class PrometheusMetricsHandler(JupyterHandler): """ Return prometheus metrics for this notebook server """ def get(self): if self.settings["authenticate_prometheus"] and not self.logged_in: raise web.HTTPError(403) self.set_header("Content-Type", prometheus_client.CONTENT_TYPE_LATEST) self.write(prometheus_client.generate_latest(prometheus_client.REGISTRY)) # ----------------------------------------------------------------------------- # URL pattern fragments for re-use # ----------------------------------------------------------------------------- # path matches any number of `/foo[/bar...]` or just `/` or '' path_regex = r"(?P(?:(?:/[^/]+)+|/?))" # ----------------------------------------------------------------------------- # URL to handler mappings # ----------------------------------------------------------------------------- default_handlers = [ (r".*/", TrailingSlashHandler), (r"api", APIVersionHandler), (r"/(robots\.txt|favicon\.ico)", web.StaticFileHandler), (r"/metrics", PrometheusMetricsHandler), ] jupyter_server-1.13.1/jupyter_server/base/zmqhandlers.py000066400000000000000000000233621415445537200236110ustar00rootroot00000000000000# coding: utf-8 """Tornado handlers for WebSocket <-> ZMQ sockets.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import json import re import struct import sys from urllib.parse import urlparse import tornado from ipython_genutils.py3compat import cast_unicode try: from jupyter_client.jsonutil import json_default except ImportError: from jupyter_client.jsonutil import date_default as json_default from jupyter_client.jsonutil import extract_dates from jupyter_client.session import Session from tornado import ioloop from tornado import web from tornado.websocket import WebSocketHandler from .handlers import JupyterHandler def serialize_binary_message(msg): """serialize a message as a binary blob Header: 4 bytes: number of msg parts (nbufs) as 32b int 4 * nbufs bytes: offset for each buffer as integer as 32b int Offsets are from the start of the buffer, including the header. Returns ------- The message serialized to bytes. 
""" # don't modify msg or buffer list in-place msg = msg.copy() buffers = list(msg.pop("buffers")) if sys.version_info < (3, 4): buffers = [x.tobytes() for x in buffers] bmsg = json.dumps(msg, default=json_default).encode("utf8") buffers.insert(0, bmsg) nbufs = len(buffers) offsets = [4 * (nbufs + 1)] for buf in buffers[:-1]: offsets.append(offsets[-1] + len(buf)) offsets_buf = struct.pack("!" + "I" * (nbufs + 1), nbufs, *offsets) buffers.insert(0, offsets_buf) return b"".join(buffers) def deserialize_binary_message(bmsg): """deserialize a message from a binary blog Header: 4 bytes: number of msg parts (nbufs) as 32b int 4 * nbufs bytes: offset for each buffer as integer as 32b int Offsets are from the start of the buffer, including the header. Returns ------- message dictionary """ nbufs = struct.unpack("!i", bmsg[:4])[0] offsets = list(struct.unpack("!" + "I" * nbufs, bmsg[4 : 4 * (nbufs + 1)])) offsets.append(None) bufs = [] for start, stop in zip(offsets[:-1], offsets[1:]): bufs.append(bmsg[start:stop]) msg = json.loads(bufs[0].decode("utf8")) msg["header"] = extract_dates(msg["header"]) msg["parent_header"] = extract_dates(msg["parent_header"]) msg["buffers"] = bufs[1:] return msg # ping interval for keeping websockets alive (30 seconds) WS_PING_INTERVAL = 30000 class WebSocketMixin(object): """Mixin for common websocket options""" ping_callback = None last_ping = 0 last_pong = 0 stream = None @property def ping_interval(self): """The interval for websocket keep-alive pings. Set ws_ping_interval = 0 to disable pings. """ return self.settings.get("ws_ping_interval", WS_PING_INTERVAL) @property def ping_timeout(self): """If no ping is received in this many milliseconds, close the websocket connection (VPNs, etc. can fail to cleanly close ws connections). Default is max of 3 pings or 30 seconds. """ return self.settings.get("ws_ping_timeout", max(3 * self.ping_interval, WS_PING_INTERVAL)) def check_origin(self, origin=None): """Check Origin == Host or Access-Control-Allow-Origin. Tornado >= 4 calls this method automatically, raising 403 if it returns False. """ if self.allow_origin == "*" or ( hasattr(self, "skip_check_origin") and self.skip_check_origin() ): return True host = self.request.headers.get("Host") if origin is None: origin = self.get_origin() # If no origin or host header is provided, assume from script if origin is None or host is None: return True origin = origin.lower() origin_host = urlparse(origin).netloc # OK if origin matches host if origin_host == host: return True # Check CORS headers if self.allow_origin: allow = self.allow_origin == origin elif self.allow_origin_pat: allow = bool(re.match(self.allow_origin_pat, origin)) else: # No CORS headers deny the request allow = False if not allow: self.log.warning( "Blocking Cross Origin WebSocket Attempt. 
Origin: %s, Host: %s", origin, host, ) return allow def clear_cookie(self, *args, **kwargs): """meaningless for websockets""" pass def open(self, *args, **kwargs): self.log.debug("Opening websocket %s", self.request.path) # start the pinging if self.ping_interval > 0: loop = ioloop.IOLoop.current() self.last_ping = loop.time() # Remember time of last ping self.last_pong = self.last_ping self.ping_callback = ioloop.PeriodicCallback( self.send_ping, self.ping_interval, ) self.ping_callback.start() return super(WebSocketMixin, self).open(*args, **kwargs) def send_ping(self): """send a ping to keep the websocket alive""" if self.ws_connection is None and self.ping_callback is not None: self.ping_callback.stop() return if self.ws_connection.client_terminated: self.close() return # check for timeout on pong. Make sure that we really have sent a recent ping in # case the machine with both server and client has been suspended since the last ping. now = ioloop.IOLoop.current().time() since_last_pong = 1e3 * (now - self.last_pong) since_last_ping = 1e3 * (now - self.last_ping) if since_last_ping < 2 * self.ping_interval and since_last_pong > self.ping_timeout: self.log.warning("WebSocket ping timeout after %i ms.", since_last_pong) self.close() return self.ping(b"") self.last_ping = now def on_pong(self, data): self.last_pong = ioloop.IOLoop.current().time() class ZMQStreamHandler(WebSocketMixin, WebSocketHandler): if tornado.version_info < (4, 1): """Backport send_error from tornado 4.1 to 4.0""" def send_error(self, *args, **kwargs): if self.stream is None: super(WebSocketHandler, self).send_error(*args, **kwargs) else: # If we get an uncaught exception during the handshake, # we have no choice but to abruptly close the connection. # TODO: for uncaught exceptions after the handshake, # we can close the connection more gracefully. self.stream.close() def _reserialize_reply(self, msg_or_list, channel=None): """Reserialize a reply message using JSON. msg_or_list can be an already-deserialized msg dict or the zmq buffer list. If it is the zmq list, it will be deserialized with self.session. This takes the msg list from the ZMQ socket and serializes the result for the websocket. This method should be used by self._on_zmq_reply to build messages that can be sent back to the browser. """ if isinstance(msg_or_list, dict): # already unpacked msg = msg_or_list else: idents, msg_list = self.session.feed_identities(msg_or_list) msg = self.session.deserialize(msg_list) if channel: msg["channel"] = channel if msg["buffers"]: buf = serialize_binary_message(msg) return buf else: smsg = json.dumps(msg, default=json_default) return cast_unicode(smsg) def _on_zmq_reply(self, stream, msg_list): # Sometimes this gets triggered when the on_close method is scheduled in the # eventloop but hasn't been called. 
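# (note: dropping the reply below is safe; the websocket is already closed or closing, so there is no client left to deliver it to)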
if self.ws_connection is None or stream.closed(): self.log.warning("zmq message arrived on closed channel") self.close() return channel = getattr(stream, "channel", None) try: msg = self._reserialize_reply(msg_list, channel=channel) except Exception: self.log.critical("Malformed message: %r" % msg_list, exc_info=True) else: self.write_message(msg, binary=isinstance(msg, bytes)) class AuthenticatedZMQStreamHandler(ZMQStreamHandler, JupyterHandler): def set_default_headers(self): """Undo the set_default_headers in JupyterHandler which doesn't make sense for websockets """ pass def pre_get(self): """Run before finishing the GET request Extend this method to add logic that should fire before the websocket finishes completing. """ # authenticate the request before opening the websocket if self.get_current_user() is None: self.log.warning("Couldn't authenticate WebSocket connection") raise web.HTTPError(403) if self.get_argument("session_id", False): self.session.session = cast_unicode(self.get_argument("session_id")) else: self.log.warning("No session ID specified") async def get(self, *args, **kwargs): # pre_get can be a coroutine in subclasses # assign and yield in two step to avoid tornado 3 issues res = self.pre_get() await res res = super(AuthenticatedZMQStreamHandler, self).get(*args, **kwargs) await res def initialize(self): self.log.debug("Initializing websocket connection %s", self.request.path) self.session = Session(config=self.config) def get_compression_options(self): return self.settings.get("websocket_compression_options", None) jupyter_server-1.13.1/jupyter_server/config_manager.py000066400000000000000000000111741415445537200233040ustar00rootroot00000000000000# coding: utf-8 """Manager to read and modify config data in JSON files.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import copy import errno import glob import io import json import os from six import PY3 from traitlets.config import LoggingConfigurable from traitlets.traitlets import Bool from traitlets.traitlets import Unicode def recursive_update(target, new): """Recursively update one dictionary using another. None values will delete their keys. """ for k, v in new.items(): if isinstance(v, dict): if k not in target: target[k] = {} recursive_update(target[k], v) if not target[k]: # Prune empty subdicts del target[k] elif v is None: target.pop(k, None) else: target[k] = v def remove_defaults(data, defaults): """Recursively remove items from dict that are already in defaults""" # copy the iterator, since data will be modified for key, value in list(data.items()): if key in defaults: if isinstance(value, dict): remove_defaults(data[key], defaults[key]) if not data[key]: # prune empty subdicts del data[key] else: if value == defaults[key]: del data[key] class BaseJSONConfigManager(LoggingConfigurable): """General JSON config manager Deals with persisting/storing config in a json file with optionally default values in a {section_name}.d directory. 
""" config_dir = Unicode(".") read_directory = Bool(True) def ensure_config_dir_exists(self): """Will try to create the config_dir directory.""" try: os.makedirs(self.config_dir, 0o755) except OSError as e: if e.errno != errno.EEXIST: raise def file_name(self, section_name): """Returns the json filename for the section_name: {config_dir}/{section_name}.json""" return os.path.join(self.config_dir, section_name + ".json") def directory(self, section_name): """Returns the directory name for the section name: {config_dir}/{section_name}.d""" return os.path.join(self.config_dir, section_name + ".d") def get(self, section_name, include_root=True): """Retrieve the config data for the specified section. Returns the data as a dictionary, or an empty dictionary if the file doesn't exist. When include_root is False, it will not read the root .json file, effectively returning the default values. """ paths = [self.file_name(section_name)] if include_root else [] if self.read_directory: pattern = os.path.join(self.directory(section_name), "*.json") # These json files should be processed first so that the # {section_name}.json take precedence. # The idea behind this is that installing a Python package may # put a json file somewhere in the a .d directory, while the # .json file is probably a user configuration. paths = sorted(glob.glob(pattern)) + paths self.log.debug( "Paths used for configuration of %s: \n\t%s", section_name, "\n\t".join(paths) ) data = {} for path in paths: if os.path.isfile(path): with io.open(path, encoding="utf-8") as f: recursive_update(data, json.load(f)) return data def set(self, section_name, data): """Store the given config data.""" filename = self.file_name(section_name) self.ensure_config_dir_exists() if self.read_directory: # we will modify data in place, so make a copy data = copy.deepcopy(data) defaults = self.get(section_name, include_root=False) remove_defaults(data, defaults) # Generate the JSON up front, since it could raise an exception, # in order to avoid writing half-finished corrupted data to disk. json_content = json.dumps(data, indent=2) if PY3: f = io.open(filename, "w", encoding="utf-8") else: f = open(filename, "wb") with f: f.write(json_content) def update(self, section_name, new_data): """Modify the config section by recursively updating it with new_data. Returns the modified config data as a dictionary. """ data = self.get(section_name) recursive_update(data, new_data) self.set(section_name, data) return data jupyter_server-1.13.1/jupyter_server/conftest.py000066400000000000000000000016231415445537200221700ustar00rootroot00000000000000import pytest pytest_plugins = ["jupyter_server.pytest_plugin"] def pytest_addoption(parser): parser.addoption( "--integration_tests", default=False, type=bool, help="only run tests with the 'integration_test' pytest mark.", ) def pytest_configure(config): # register an additional marker config.addinivalue_line("markers", "integration_test") def pytest_runtest_setup(item): is_integration_test = any(mark for mark in item.iter_markers(name="integration_test")) if item.config.getoption("--integration_tests") is True: if not is_integration_test: pytest.skip("Only running tests marked as 'integration_test'.") else: if is_integration_test: pytest.skip( "Skipping this test because it's marked 'integration_test'. Run integration tests using the `--integration_tests` flag." 
) jupyter_server-1.13.1/jupyter_server/extension/000077500000000000000000000000001415445537200220035ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/extension/__init__.py000066400000000000000000000000001415445537200241020ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/extension/application.py000066400000000000000000000510771415445537200246700ustar00rootroot00000000000000import logging import re import sys from jinja2 import Environment from jinja2 import FileSystemLoader from jupyter_core.application import JupyterApp from jupyter_core.application import NoStart from tornado.log import LogFormatter from tornado.web import RedirectHandler from traitlets import Bool from traitlets import default from traitlets import Dict from traitlets import HasTraits from traitlets import List from traitlets import Unicode from traitlets.config import Config from .handler import ExtensionHandlerMixin from jupyter_server.serverapp import ServerApp from jupyter_server.transutils import _i18n from jupyter_server.utils import is_namespace_package from jupyter_server.utils import url_path_join # ----------------------------------------------------------------------------- # Util functions and classes. # ----------------------------------------------------------------------------- def _preparse_for_subcommand(Application, argv): """Preparse command line to look for subcommands.""" # Read in arguments from command line. if len(argv) == 0: return # Find any subcommands. if Application.subcommands and len(argv) > 0: # we have subcommands, and one may have been specified subc, subargv = argv[0], argv[1:] if re.match(r"^\w(\-?\w)*$", subc) and subc in Application.subcommands: # it's a subcommand, and *not* a flag or class parameter app = Application() app.initialize_subcommand(subc, subargv) return app.subapp def _preparse_for_stopping_flags(Application, argv): """Looks for 'help', 'version', and 'generate-config' commands in command line. If found, raises the help and version of the current Application. This is useful for traitlets applications that have to parse the command line multiple times, but want to control when 'help' and 'version' are raised. """ # Arguments after a '--' argument are for the script IPython may be # about to run, not IPython itself. For arguments parsed here (help and # version), we want to only search the arguments up to the first # occurrence of '--', which we're calling interpreted_argv. try: interpreted_argv = argv[: argv.index("--")] except ValueError: interpreted_argv = argv # Catch any help calls. if any(x in interpreted_argv for x in ("-h", "--help-all", "--help")): app = Application() app.print_help("--help-all" in interpreted_argv) app.exit(0) # Catch version commands if "--version" in interpreted_argv or "-V" in interpreted_argv: app = Application() app.print_version() app.exit(0) # Catch generate-config commands. if "--generate-config" in interpreted_argv: app = Application() app.write_default_config() app.exit(0) class ExtensionAppJinjaMixin(HasTraits): """Use Jinja templates for HTML templates on top of an ExtensionApp.""" jinja2_options = Dict( help=_i18n( """Options to pass to the jinja2 environment for this extension. """ ) ).tag(config=True) def _prepare_templates(self): # Get templates defined in a subclass. self.initialize_templates() # Add templates to web app settings if extension has templates.
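# (note: settings keys are namespaced by extension name, e.g. an extension named "myext" (an illustrative name) gets "myext_template_paths" and "myext_jinja2_env", so several extensions can share one tornado settings dict)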
if len(self.template_paths) > 0: self.settings.update({"{}_template_paths".format(self.name): self.template_paths}) # Create a jinja environment for logging html templates. self.jinja2_env = Environment( loader=FileSystemLoader(self.template_paths), extensions=["jinja2.ext.i18n"], autoescape=True, **self.jinja2_options ) # Add the jinja2 environment for this extension to the tornado settings. self.settings.update({"{}_jinja2_env".format(self.name): self.jinja2_env}) # ----------------------------------------------------------------------------- # ExtensionApp # ----------------------------------------------------------------------------- class JupyterServerExtensionException(Exception): """Exception class raised for Server extension errors.""" # ----------------------------------------------------------------------------- # ExtensionApp # ----------------------------------------------------------------------------- class ExtensionApp(JupyterApp): """Base class for configurable Jupyter Server Extension Applications. ExtensionApp subclasses can be initialized two ways: 1. Extension is listed as a jpserver_extension, and ServerApp calls its load_jupyter_server_extension classmethod. This is the classic way of loading a server extension. 2. Extension is launched directly by calling its `launch_instance` class method. This method can be set as an entry_point in the extension's setup.py """ # Subclasses should override this trait. Tells the server if # this extension allows other extensions to be loaded # side-by-side when launched directly. load_other_extensions = True # A useful class property that subclasses can override to # configure the underlying Jupyter Server when this extension # is launched directly (using its `launch_instance` method). serverapp_config = {} # Some subclasses will likely override this trait to flip # the default value to False if they don't offer a browser # based frontend. open_browser = Bool( help="""Whether to open in a browser after starting. The specific browser used is platform dependent and determined by the python standard library `webbrowser` module, unless it is overridden using the --browser (ServerApp.browser) configuration option. """ ).tag(config=True) @default("open_browser") def _default_open_browser(self): return self.serverapp.config["ServerApp"].get("open_browser", True) # The extension name used to name the jupyter config # file, jupyter_{name}_config. # This should also match the jupyter subcommand used to launch # this extension from the CLI, e.g. `jupyter {name}`. name = None @classmethod def get_extension_package(cls): parts = cls.__module__.split(".") if is_namespace_package(parts[0]): # in this case the package name is `<namespace>.<package>`. return ".".join(parts[0:2]) return parts[0] @classmethod def get_extension_point(cls): return cls.__module__ # Extension URL sets the default landing page for this extension. extension_url = "/" default_url = Unicode().tag(config=True) @default("default_url") def _default_url(self): return self.extension_url file_url_prefix = Unicode("notebooks") # Is this linked to a serverapp yet? _linked = Bool(False) # Extension can configure the ServerApp from the command-line classes = [ ServerApp, ] # A ServerApp is not defined yet, but will be initialized below.
serverapp = None _log_formatter_cls = LogFormatter @default("log_level") def _default_log_level(self): return logging.INFO @default("log_format") def _default_log_format(self): """override default log format to include date & time""" return u"%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s]%(end_color)s %(message)s" static_url_prefix = Unicode( help="""Url where the static assets for the extension are served.""" ).tag(config=True) @default("static_url_prefix") def _default_static_url_prefix(self): static_url = "static/{name}/".format(name=self.name) return url_path_join(self.serverapp.base_url, static_url) static_paths = List( Unicode(), help="""paths to search for serving static files. This allows adding javascript/css to be available from the notebook server machine, or overriding individual files in the IPython """, ).tag(config=True) template_paths = List( Unicode(), help=_i18n( """Paths to search for serving jinja templates. Can be used to override templates from notebook.templates.""" ), ).tag(config=True) settings = Dict(help=_i18n("""Settings that will passed to the server.""")).tag(config=True) handlers = List(help=_i18n("""Handlers appended to the server.""")).tag(config=True) def _config_file_name_default(self): """The default config file name.""" if not self.name: return "" return "jupyter_{}_config".format(self.name.replace("-", "_")) def initialize_settings(self): """Override this method to add handling of settings.""" pass def initialize_handlers(self): """Override this method to append handlers to a Jupyter Server.""" pass def initialize_templates(self): """Override this method to add handling of template files.""" pass def _prepare_config(self): """Builds a Config object from the extension's traits and passes the object to the webapp's settings as `_config`. """ traits = self.class_own_traits().keys() self.extension_config = Config({t: getattr(self, t) for t in traits}) self.settings["{}_config".format(self.name)] = self.extension_config def _prepare_settings(self): # Make webapp settings accessible to initialize_settings method webapp = self.serverapp.web_app self.settings.update(**webapp.settings) # Add static and template paths to settings. self.settings.update( { "{}_static_paths".format(self.name): self.static_paths, "{}".format(self.name): self, } ) # Get setting defined by subclass using initialize_settings method. self.initialize_settings() # Update server settings with extension settings. webapp.settings.update(**self.settings) def _prepare_handlers(self): webapp = self.serverapp.web_app # Get handlers defined by extension subclass. self.initialize_handlers() # prepend base_url onto the patterns that we match new_handlers = [] for handler_items in self.handlers: # Build url pattern including base_url pattern = url_path_join(webapp.settings["base_url"], handler_items[0]) handler = handler_items[1] # Get handler kwargs, if given kwargs = {} if issubclass(handler, ExtensionHandlerMixin): kwargs["name"] = self.name try: kwargs.update(handler_items[2]) except IndexError: pass new_handler = (pattern, handler, kwargs) new_handlers.append(new_handler) # Add static endpoint for this extension, if static paths are given. if len(self.static_paths) > 0: # Append the extension's static directory to server handlers. static_url = url_path_join(self.static_url_prefix, "(.*)") # Construct handler. 
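# (note: tornado handler specs are (url_pattern, handler_class, kwargs) tuples; the "path" kwarg tells the static handler class which directories to serve from)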
handler = ( static_url, webapp.settings["static_handler_class"], {"path": self.static_paths}, ) new_handlers.append(handler) webapp.add_handlers(".*$", new_handlers) def _prepare_templates(self): # Add templates to web app settings if extension has templates. if len(self.template_paths) > 0: self.settings.update({"{}_template_paths".format(self.name): self.template_paths}) self.initialize_templates() def _jupyter_server_config(self): base_config = { "ServerApp": { "default_url": self.default_url, "open_browser": self.open_browser, "file_url_prefix": self.file_url_prefix, } } base_config["ServerApp"].update(self.serverapp_config) return base_config def _link_jupyter_server_extension(self, serverapp): """Link the ExtensionApp to an initialized ServerApp. The ServerApp is stored as an attribute and config is exchanged between ServerApp and `self` in case the command line contains traits for the ExtensionApp or the ExtensionApp's config files have server settings. Note, the ServerApp has not initialized the Tornado Web Application yet, so do not try to affect the `web_app` attribute. """ self.serverapp = serverapp # Load config from an ExtensionApp's config files. self.load_config_file() # ServerApp's config might have picked up # config for the ExtensionApp. We call # update_config to update ExtensionApp's # traits with these values found in ServerApp's # config. # ServerApp config ---> ExtensionApp traits self.update_config(self.serverapp.config) # Use ExtensionApp's CLI parser to find any extra # args that passed through ServerApp and # now belong to ExtensionApp. self.parse_command_line(self.serverapp.extra_args) # If any config should be passed upstream to the # ServerApp, do it here. # i.e. ServerApp traits <--- ExtensionApp config self.serverapp.update_config(self.config) # Acknowledge that this extension has been linked. self._linked = True def initialize(self): """Initialize the extension app. The corresponding server app and webapp should already be initialized by this step. 1) Appends Handlers to the ServerApp, 2) Passes config and settings from ExtensionApp to the Tornado web application 3) Points Tornado Webapp to templates and static assets. """ if not self.serverapp: msg = ( "This extension has no attribute `serverapp`. " "Try calling `.link_to_serverapp()` before calling " "`.initialize()`." ) raise JupyterServerExtensionException(msg) self._prepare_config() self._prepare_templates() self._prepare_settings() self._prepare_handlers() def start(self): """Start the underlying Jupyter server. Server should be started after extension is initialized. """ super(ExtensionApp, self).start() # Start the server. self.serverapp.start() async def stop_extension(self): """Cleanup any resources managed by this extension.""" def stop(self): """Stop the underlying Jupyter server.""" self.serverapp.stop() self.serverapp.clear_instance() @classmethod def _load_jupyter_server_extension(cls, serverapp): """Initialize and configure this extension, then add the extension's settings and handlers to the server's web application. """ extension_manager = serverapp.extension_manager try: # Get loaded extension from serverapp. 
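# Sketch of the two-way config exchange performed by
# _link_jupyter_server_extension above, for a hypothetical MyExtensionApp:
# one jupyter_myextension_config.py file (the config loader supplies `c`)
# can carry traits for both applications,
#
#     c.ServerApp.port = 9999                              # passed up to the ServerApp
#     c.MyExtensionApp.default_url = "/myextension/home"   # kept by the extension
#
# since linking copies ServerApp config onto the extension and extension
# config back onto the server before the web application is built.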
point = extension_manager.extension_points[cls.name] extension = point.app except KeyError: extension = cls() extension._link_jupyter_server_extension(serverapp) extension.initialize() return extension @classmethod def load_classic_server_extension(cls, serverapp): """Enables extension to be loaded as classic Notebook (jupyter/notebook) extension.""" extension = cls() extension.serverapp = serverapp extension.load_config_file() extension.update_config(serverapp.config) extension.parse_command_line(serverapp.extra_args) # Add redirects to get favicons from old locations in the classic notebook server extension.handlers.extend( [ ( r"/static/favicons/favicon.ico", RedirectHandler, {"url": url_path_join(serverapp.base_url, "static/base/images/favicon.ico")}, ), ( r"/static/favicons/favicon-busy-1.ico", RedirectHandler, { "url": url_path_join( serverapp.base_url, "static/base/images/favicon-busy-1.ico" ) }, ), ( r"/static/favicons/favicon-busy-2.ico", RedirectHandler, { "url": url_path_join( serverapp.base_url, "static/base/images/favicon-busy-2.ico" ) }, ), ( r"/static/favicons/favicon-busy-3.ico", RedirectHandler, { "url": url_path_join( serverapp.base_url, "static/base/images/favicon-busy-3.ico" ) }, ), ( r"/static/favicons/favicon-file.ico", RedirectHandler, { "url": url_path_join( serverapp.base_url, "static/base/images/favicon-file.ico" ) }, ), ( r"/static/favicons/favicon-notebook.ico", RedirectHandler, { "url": url_path_join( serverapp.base_url, "static/base/images/favicon-notebook.ico" ) }, ), ( r"/static/favicons/favicon-terminal.ico", RedirectHandler, { "url": url_path_join( serverapp.base_url, "static/base/images/favicon-terminal.ico" ) }, ), ( r"/static/logo/logo.png", RedirectHandler, {"url": url_path_join(serverapp.base_url, "static/base/images/logo.png")}, ), ] ) extension.initialize() @classmethod def initialize_server(cls, argv=[], load_other_extensions=True, **kwargs): """Creates an instance of ServerApp and explicitly sets this extension to enabled=True (i.e. superceding disabling found in other config from files). The `launch_instance` method uses this method to initialize and start a server. """ jpserver_extensions = {cls.get_extension_package(): True} find_extensions = cls.load_other_extensions if "jpserver_extensions" in cls.serverapp_config: jpserver_extensions.update(cls.serverapp_config["jpserver_extensions"]) cls.serverapp_config["jpserver_extensions"] = jpserver_extensions find_extensions = False serverapp = ServerApp.instance(jpserver_extensions=jpserver_extensions, **kwargs) serverapp.aliases.update(cls.aliases) serverapp.initialize( argv=argv, starter_extension=cls.name, find_extensions=find_extensions, ) return serverapp @classmethod def launch_instance(cls, argv=None, **kwargs): """Launch the extension like an application. Initializes+configs a stock server and appends the extension to the server. Then starts the server and routes to extension's landing page. """ # Handle arguments. if argv is None: args = sys.argv[1:] # slice out extension config. else: args = argv # Handle all "stops" that could happen before # continuing to launch a server+extension. subapp = _preparse_for_subcommand(cls, args) if subapp: subapp.start() return # Check for help, version, and generate-config arguments # before initializing server to make sure these # arguments trigger actions from the extension not the server. _preparse_for_stopping_flags(cls, args) serverapp = cls.initialize_server(argv=args) # Log if extension is blocking other extensions from loading. 
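# Discovery sketch: an extension package advertises its points from its
# __init__.py with a function like the one below (names hypothetical); see
# get_metadata in extension/utils.py, later in this package:
#
#     def _jupyter_server_extension_points():
#         from myextension.app import MyExtensionApp
#
#         return [{"module": "myextension.app", "app": MyExtensionApp}]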
if not cls.load_other_extensions: serverapp.log.info( "{ext_name} is running without loading " "other extensions.".format(ext_name=cls.name) ) # Start the server. try: serverapp.start() except NoStart: pass jupyter_server-1.13.1/jupyter_server/extension/config.py000066400000000000000000000022311415445537200236200ustar00rootroot00000000000000from jupyter_server.services.config.manager import ConfigManager DEFAULT_SECTION_NAME = "jupyter_server_config" class ExtensionConfigManager(ConfigManager): """A manager class to interface with Jupyter Server Extension config found in a `config.d` folder. It is assumed that all configuration files in this directory are JSON files. """ def get_jpserver_extensions(self, section_name=DEFAULT_SECTION_NAME): """Return the jpserver_extensions field from all config files found.""" data = self.get(section_name) return data.get("ServerApp", {}).get("jpserver_extensions", {}) def enabled(self, name, section_name=DEFAULT_SECTION_NAME, include_root=True): """Is the extension enabled?""" extensions = self.get_jpserver_extensions(section_name) try: return extensions[name] except KeyError: return False def enable(self, name): data = {"ServerApp": {"jpserver_extensions": {name: True}}} self.update(name, data) def disable(self, name): data = {"ServerApp": {"jpserver_extensions": {name: False}}} self.update(name, data) jupyter_server-1.13.1/jupyter_server/extension/handler.py000066400000000000000000000076071415445537200240030ustar00rootroot00000000000000from jinja2.exceptions import TemplateNotFound from jupyter_server.base.handlers import FileFindHandler class ExtensionHandlerJinjaMixin: """Mixin class for ExtensionApp handlers that use jinja templating for template rendering. """ def get_template(self, name): """Return the jinja template object for a given name""" try: env = "{}_jinja2_env".format(self.name) return self.settings[env].get_template(name) except TemplateNotFound: return super().get_template(name) class ExtensionHandlerMixin: """Base class for Jupyter server extension handlers. Subclasses can serve static files behind a namespaced endpoint: "/static/<name>/" This allows multiple extensions to serve static files under their own namespace and avoid intercepting requests for other extensions. """ def initialize(self, name): self.name = name @property def extensionapp(self): return self.settings[self.name] @property def serverapp(self): key = "serverapp" return self.settings[key] @property def log(self): if not hasattr(self, "name"): return super().log # Attempt to pull the ExtensionApp's log, otherwise fall back to ServerApp. try: return self.extensionapp.log except AttributeError: return self.serverapp.log @property def config(self): return self.settings["{}_config".format(self.name)] @property def server_config(self): return self.settings["config"] @property def base_url(self): return self.settings.get("base_url", "/") @property def static_url_prefix(self): return self.extensionapp.static_url_prefix @property def static_path(self): return self.settings["{}_static_paths".format(self.name)] def static_url(self, path, include_host=None, **kwargs): """Returns a static URL for the given relative static file path. This method requires you set the ``{name}_static_paths`` setting in your extension (which specifies the root directory of your static files). This method returns a versioned url (by default appending ``?v=<signature>``), which allows the static files to be cached indefinitely.
This can be disabled by passing ``include_version=False`` (in the default implementation; other static file implementations are not required to support this, but they may support other options). By default this method returns URLs relative to the current host, but if ``include_host`` is true the URL returned will be absolute. If this handler has an ``include_host`` attribute, that value will be used as the default for all `static_url` calls that do not pass ``include_host`` as a keyword argument. """ key = "{}_static_paths".format(self.name) try: self.require_setting(key, "static_url") except Exception as e: if key in self.settings: raise Exception( "This extension doesn't have any static paths listed. Check that the " "extension's `static_paths` trait is set." ) from e else: raise e get_url = self.settings.get("static_handler_class", FileFindHandler).make_static_url if include_host is None: include_host = getattr(self, "include_host", False) if include_host: base = self.request.protocol + "://" + self.request.host else: base = "" # Hijack settings dict to send extension templates to extension # static directory. settings = {"static_path": self.static_path, "static_url_prefix": self.static_url_prefix} return base + get_url(settings, path, **kwargs) jupyter_server-1.13.1/jupyter_server/extension/manager.py000066400000000000000000000323201415445537200237670ustar00rootroot00000000000000import importlib import sys import traceback from tornado.gen import multi from traitlets import Any from traitlets import Bool from traitlets import default from traitlets import Dict from traitlets import HasTraits from traitlets import Instance from traitlets import observe from traitlets import Unicode from traitlets import validate as validate_trait from traitlets.config import LoggingConfigurable from .config import ExtensionConfigManager from .utils import ExtensionMetadataError from .utils import ExtensionModuleNotFound from .utils import get_loader from .utils import get_metadata class ExtensionPoint(HasTraits): """A simple API for connecting to a Jupyter Server extension point defined by metadata and importable from a Python package. """ _linked = Bool(False) _app = Any(None, allow_none=True) metadata = Dict() @validate_trait("metadata") def _valid_metadata(self, proposed): metadata = proposed["value"] # Verify that the metadata has a "name" key. try: self._module_name = metadata["module"] except KeyError: raise ExtensionMetadataError( "There is no 'module' key in the extension's " "metadata packet." ) try: self._module = importlib.import_module(self._module_name) except ImportError: raise ExtensionModuleNotFound( "The submodule '{}' could not be found. Are you " "sure the extension is installed?".format(self._module_name) ) # If the metadata includes an ExtensionApp, create an instance. if "app" in metadata: self._app = metadata["app"]() return metadata @property def linked(self): """Has this extension point been linked to the server. Will pull from ExtensionApp's trait, if this point is an instance of ExtensionApp. """ if self.app: return self.app._linked return self._linked @property def app(self): """If the metadata includes an `app` field""" return self._app @property def config(self): """Return any configuration provided by this extension point.""" if self.app: return self.app._jupyter_server_config() # At some point, we might want to add logic to load config from # disk when extensions don't use ExtensionApp. 
else: return {} @property def module_name(self): """Name of the Python package module where the extension's _load_jupyter_server_extension can be found. """ return self._module_name @property def name(self): """Name of the extension. If it's not provided in the metadata, `name` is set to the extension's module name. """ if self.app: return self.app.name return self.metadata.get("name", self.module_name) @property def module(self): """The imported module (using importlib.import_module)""" return self._module def _get_linker(self): if self.app: linker = self.app._link_jupyter_server_extension else: linker = getattr( self.module, # Search for a _link_jupyter_server_extension "_link_jupyter_server_extension", # Otherwise return a dummy function. lambda serverapp: None, ) return linker def _get_loader(self): loc = self.app if not loc: loc = self.module loader = get_loader(loc) return loader def validate(self): """Check that both a linker and loader exist.""" try: self._get_linker() self._get_loader() except Exception: return False else: return True def link(self, serverapp): """Link the extension to a Jupyter ServerApp object. This looks for a `_link_jupyter_server_extension` function in the extension's module or ExtensionApp class. """ if not self.linked: linker = self._get_linker() linker(serverapp) # Store this extension as already linked. self._linked = True def load(self, serverapp): """Load the extension in a Jupyter ServerApp object. This looks for a `_load_jupyter_server_extension` function in the extension's module or ExtensionApp class. """ loader = self._get_loader() return loader(serverapp) class ExtensionPackage(HasTraits): """An API for interfacing with a Jupyter Server extension package. Usage: ext_name = "my_extensions" extpkg = ExtensionPackage(name=ext_name) """ name = Unicode(help="Name of an importable Python package.") enabled = Bool(False).tag(config=True) def __init__(self, *args, **kwargs): # Store extension points that have been linked. self._linked_points = {} super().__init__(*args, **kwargs) _linked_points = {} @validate_trait("name") def _validate_name(self, proposed): name = proposed["value"] self._extension_points = {} try: self._module, self._metadata = get_metadata(name) except ImportError: raise ExtensionModuleNotFound( "The module '{name}' could not be found. Are you " "sure the extension is installed?".format(name=name) ) # Create extension point interfaces for each extension path. for m in self._metadata: point = ExtensionPoint(metadata=m) self._extension_points[point.name] = point return name @property def module(self): """The imported extension package module.""" return self._module @property def version(self): """Get the version of this package, if it's given.
Otherwise, return an empty string""" return getattr(self._module, "__version__", "") @property def metadata(self): """Extension metadata loaded from the extension package.""" return self._metadata @property def extension_points(self): """A dictionary of extension points.""" return self._extension_points def validate(self): """Validate all extension points in this package.""" for extension in self.extension_points.values(): if not extension.validate(): return False return True def link_point(self, point_name, serverapp): linked = self._linked_points.get(point_name, False) if not linked: point = self.extension_points[point_name] point.link(serverapp) def load_point(self, point_name, serverapp): point = self.extension_points[point_name] return point.load(serverapp) def link_all_points(self, serverapp): for point_name in self.extension_points: self.link_point(point_name, serverapp) def load_all_points(self, serverapp): return [self.load_point(point_name, serverapp) for point_name in self.extension_points] class ExtensionManager(LoggingConfigurable): """High level interface for finding, validating, linking, loading, and managing Jupyter Server extensions. Usage: m = ExtensionManager(config_manager=...) """ config_manager = Instance(ExtensionConfigManager, allow_none=True) serverapp = Any() # Use Any to avoid circular import of Instance(ServerApp) @default("config_manager") def _load_default_config_manager(self): config_manager = ExtensionConfigManager() self._load_config_manager(config_manager) return config_manager @observe("config_manager") def _config_manager_changed(self, change): if change.new: self._load_config_manager(change.new) # The `extensions` attribute provides a dictionary # with extension (package) names mapped to their ExtensionPackage interface # (see above). This manager simplifies the interaction between the # ServerApp and the extensions being appended. extensions = Dict( help=""" Dictionary with extension package names as keys and ExtensionPackage objects as values. """ ) @property def sorted_extensions(self): """Returns an extensions dictionary, sorted alphabetically.""" return dict(sorted(self.extensions.items())) # The `_linked_extensions` attribute tracks when each extension # has been successfully linked to a ServerApp. This helps prevent # extensions from being re-linked recursively unintentionally if another # extension attempts to link extensions again. linked_extensions = Dict( help=""" Dictionary with extension names as keys; values are True if the extension is linked, False if not.
""" ) @property def extension_apps(self): """Return mapping of extension names and sets of ExtensionApp objects.""" return { name: {point.app for point in extension.extension_points.values() if point.app} for name, extension in self.extensions.items() } @property def extension_points(self): """Return mapping of extension point names and ExtensionPoint objects.""" return { name: point for value in self.extensions.values() for name, point in value.extension_points.items() } def from_config_manager(self, config_manager): """Add extensions found by an ExtensionConfigManager""" # load triggered via config_manager trait observer self.config_manager = config_manager def _load_config_manager(self, config_manager): """Actually load our config manager""" jpserver_extensions = config_manager.get_jpserver_extensions() self.from_jpserver_extensions(jpserver_extensions) def from_jpserver_extensions(self, jpserver_extensions): """Add extensions from 'jpserver_extensions'-like dictionary.""" for name, enabled in jpserver_extensions.items(): self.add_extension(name, enabled=enabled) def add_extension(self, extension_name, enabled=False): """Try to add extension to manager, return True if successful. Otherwise, return False. """ try: extpkg = ExtensionPackage(name=extension_name, enabled=enabled) self.extensions[extension_name] = extpkg return True # Raise a warning if the extension cannot be loaded. except Exception as e: if self.serverapp.reraise_server_extension_failures: raise self.log.warning(e) return False def link_extension(self, name): linked = self.linked_extensions.get(name, False) extension = self.extensions[name] if not linked and extension.enabled: try: # Link extension and store links extension.link_all_points(self.serverapp) self.linked_extensions[name] = True self.log.info("{name} | extension was successfully linked.".format(name=name)) except Exception as e: if self.serverapp.reraise_server_extension_failures: raise self.log.warning(e) def load_extension(self, name): extension = self.extensions.get(name) if extension.enabled: try: extension.load_all_points(self.serverapp) except Exception as e: if self.serverapp.reraise_server_extension_failures: raise self.log.debug("".join(traceback.format_exception(*sys.exc_info()))) self.log.warning( "{name} | extension failed loading with message: {error}".format( name=name, error=str(e) ) ) else: self.log.info("{name} | extension was successfully loaded.".format(name=name)) async def stop_extension(self, name, apps): """Call the shutdown hooks in the specified apps.""" for app in apps: self.log.debug('{} | extension app "{}" stopping'.format(name, app.name)) await app.stop_extension() self.log.debug('{} | extension app "{}" stopped'.format(name, app.name)) def link_all_extensions(self): """Link all enabled extensions to an instance of ServerApp """ # Sort the extension names to enforce deterministic linking # order. for name in self.sorted_extensions.keys(): self.link_extension(name) def load_all_extensions(self): """Load all enabled extensions and append them to the parent ServerApp. """ # Sort the extension names to enforce deterministic loading # order. 
for name in self.sorted_extensions.keys(): self.load_extension(name) async def stop_all_extensions(self): """Call the shutdown hooks in all extensions.""" await multi( [ self.stop_extension(name, apps) for name, apps in sorted(dict(self.extension_apps).items()) ] ) jupyter_server-1.13.1/jupyter_server/extension/serverextension.py000066400000000000000000000304031415445537200256200ustar00rootroot00000000000000# coding: utf-8 """Utilities for installing extensions""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import os import sys from jupyter_core.application import JupyterApp from jupyter_core.paths import ENV_CONFIG_PATH from jupyter_core.paths import jupyter_config_dir from jupyter_core.paths import SYSTEM_CONFIG_PATH from tornado.log import LogFormatter from traitlets import Bool from jupyter_server._version import __version__ from jupyter_server.extension.config import ExtensionConfigManager from jupyter_server.extension.manager import ExtensionManager from jupyter_server.extension.manager import ExtensionPackage def _get_config_dir(user=False, sys_prefix=False): """Get the location of config files for the current context Returns the string to the environment Parameters ---------- user : bool [default: False] Get the user's .jupyter config directory sys_prefix : bool [default: False] Get sys.prefix, i.e. ~/.envs/my-env/etc/jupyter """ if user and sys_prefix: sys_prefix = False if user: extdir = jupyter_config_dir() elif sys_prefix: extdir = ENV_CONFIG_PATH[0] else: extdir = SYSTEM_CONFIG_PATH[0] return extdir def _get_extmanager_for_context(write_dir="jupyter_server_config.d", user=False, sys_prefix=False): """Get an extension manager pointing at the current context Returns the path to the current context and an ExtensionManager object. Parameters ---------- write_dir : str [default: 'jupyter_server_config.d'] Name of config directory to write extension config. user : bool [default: False] Get the user's .jupyter config directory sys_prefix : bool [default: False] Get sys.prefix, i.e. 
~/.envs/my-env/etc/jupyter """ config_dir = _get_config_dir(user=user, sys_prefix=sys_prefix) config_manager = ExtensionConfigManager( read_config_path=[config_dir], write_config_dir=os.path.join(config_dir, write_dir), ) extension_manager = ExtensionManager( config_manager=config_manager, ) return config_dir, extension_manager class ArgumentConflict(ValueError): pass _base_flags = {} _base_flags.update(JupyterApp.flags) _base_flags.pop("y", None) _base_flags.pop("generate-config", None) _base_flags.update( { "user": ( { "BaseExtensionApp": { "user": True, } }, "Apply the operation only for the given user", ), "system": ( { "BaseExtensionApp": { "user": False, "sys_prefix": False, } }, "Apply the operation system-wide", ), "sys-prefix": ( { "BaseExtensionApp": { "sys_prefix": True, } }, "Use sys.prefix as the prefix for installing extensions (for environments, packaging)", ), "py": ( { "BaseExtensionApp": { "python": True, } }, "Install from a Python package", ), } ) _base_flags["python"] = _base_flags["py"] _base_aliases = {} _base_aliases.update(JupyterApp.aliases) class BaseExtensionApp(JupyterApp): """Base extension installer app""" _log_formatter_cls = LogFormatter flags = _base_flags aliases = _base_aliases version = __version__ user = Bool(False, config=True, help="Whether to do a user install") sys_prefix = Bool(True, config=True, help="Use the sys.prefix as the prefix") python = Bool(False, config=True, help="Install from a Python package") def _log_format_default(self): """A default format for messages""" return "%(message)s" @property def config_dir(self): return _get_config_dir(user=self.user, sys_prefix=self.sys_prefix) # Constants for pretty print extension listing function. # Window doesn't support coloring in the commandline GREEN_ENABLED = "\033[32menabled\033[0m" if os.name != "nt" else "enabled" RED_DISABLED = "\033[31mdisabled\033[0m" if os.name != "nt" else "disabled" GREEN_OK = "\033[32mOK\033[0m" if os.name != "nt" else "ok" RED_X = "\033[31m X\033[0m" if os.name != "nt" else " X" # ------------------------------------------------------------------------------ # Public API # ------------------------------------------------------------------------------ def toggle_server_extension_python( import_name, enabled=None, parent=None, user=False, sys_prefix=True ): """Toggle the boolean setting for a given server extension in a Jupyter config file. 
""" sys_prefix = False if user else sys_prefix config_dir = _get_config_dir(user=user, sys_prefix=sys_prefix) manager = ExtensionConfigManager( read_config_path=[config_dir], write_config_dir=os.path.join(config_dir, "jupyter_server_config.d"), ) if enabled: manager.enable(import_name) else: manager.disable(import_name) # ---------------------------------------------------------------------- # Applications # ---------------------------------------------------------------------- flags = {} flags.update(BaseExtensionApp.flags) flags.pop("y", None) flags.pop("generate-config", None) flags.update( { "user": ( { "ToggleServerExtensionApp": { "user": True, } }, "Perform the operation for the current user", ), "system": ( { "ToggleServerExtensionApp": { "user": False, "sys_prefix": False, } }, "Perform the operation system-wide", ), "sys-prefix": ( { "ToggleServerExtensionApp": { "sys_prefix": True, } }, "Use sys.prefix as the prefix for installing server extensions", ), "py": ( { "ToggleServerExtensionApp": { "python": True, } }, "Install from a Python package", ), } ) flags["python"] = flags["py"] class ToggleServerExtensionApp(BaseExtensionApp): """A base class for enabling/disabling extensions""" name = "jupyter server extension enable/disable" description = "Enable/disable a server extension using frontend configuration files." flags = flags _toggle_value = Bool() _toggle_pre_message = "" _toggle_post_message = "" def toggle_server_extension(self, import_name): """Change the status of a named server extension. Uses the value of `self._toggle_value`. Parameters --------- import_name : str Importable Python module (dotted-notation) exposing the magic-named `load_jupyter_server_extension` function """ # Create an extension manager for this instance. config_dir, extension_manager = _get_extmanager_for_context( user=self.user, sys_prefix=self.sys_prefix ) try: self.log.info("{}: {}".format(self._toggle_pre_message.capitalize(), import_name)) self.log.info("- Writing config: {}".format(config_dir)) # Validate the server extension. self.log.info(" - Validating {}...".format(import_name)) # Interface with the Extension Package and validate. extpkg = ExtensionPackage(name=import_name) extpkg.validate() version = extpkg.version self.log.info(" {} {} {}".format(import_name, version, GREEN_OK)) # Toggle extension config. config = extension_manager.config_manager if self._toggle_value is True: config.enable(import_name) else: config.disable(import_name) # If successful, let's log. self.log.info(" - Extension successfully {}.".format(self._toggle_post_message)) except Exception as err: self.log.info(" {} Validation failed: {}".format(RED_X, err)) def start(self): """Perform the App's actions as configured""" if not self.extra_args: sys.exit("Please specify a server extension/package to enable or disable") for arg in self.extra_args: self.toggle_server_extension(arg) class EnableServerExtensionApp(ToggleServerExtensionApp): """An App that enables (and validates) Server Extensions""" name = "jupyter server extension enable" description = """ Enable a server extension in configuration. Usage jupyter server extension enable [--system|--sys-prefix] """ _toggle_value = True _toggle_pre_message = "enabling" _toggle_post_message = "enabled" class DisableServerExtensionApp(ToggleServerExtensionApp): """An App that disables Server Extensions""" name = "jupyter server extension disable" description = """ Disable a server extension in configuration. 
Usage jupyter server extension disable [--system|--sys-prefix] """ _toggle_value = False _toggle_pre_message = "disabling" _toggle_post_message = "disabled" class ListServerExtensionsApp(BaseExtensionApp): """An App that lists (and validates) Server Extensions""" name = "jupyter server extension list" version = __version__ description = "List all server extensions known by the configuration system" def list_server_extensions(self): """List all enabled and disabled server extensions, by config path Enabled extensions are validated, potentially generating warnings. """ configurations = ( {"user": True, "sys_prefix": False}, {"user": False, "sys_prefix": True}, {"user": False, "sys_prefix": False}, ) for option in configurations: config_dir, ext_manager = _get_extmanager_for_context(**option) self.log.info("Config dir: {}".format(config_dir)) for name, extension in ext_manager.extensions.items(): enabled = extension.enabled # Attempt to get extension metadata self.log.info(u" {} {}".format(name, GREEN_ENABLED if enabled else RED_DISABLED)) try: self.log.info(" - Validating {}...".format(name)) if not extension.validate(): raise ValueError("validation failed") version = extension.version self.log.info(" {} {} {}".format(name, version, GREEN_OK)) except Exception as err: self.log.warn(" {} {}".format(RED_X, err)) # Add a blank line between paths. self.log.info("") def start(self): """Perform the App's actions as configured""" self.list_server_extensions() _examples = """ jupyter server extension list # list all configured server extensions jupyter server extension enable --py # enable all server extensions in a Python package jupyter server extension disable --py # disable all server extensions in a Python package """ class ServerExtensionApp(BaseExtensionApp): """Root level server extension app""" name = "jupyter server extension" version = __version__ description = "Work with Jupyter server extensions" examples = _examples subcommands = dict( enable=(EnableServerExtensionApp, "Enable a server extension"), disable=(DisableServerExtensionApp, "Disable a server extension"), list=(ListServerExtensionsApp, "List server extensions"), ) def start(self): """Perform the App's actions as configured""" super(ServerExtensionApp, self).start() # The above should have called a subcommand and raised NoStart; if we # get here, it didn't, so we should self.log.info a message. subcmds = ", ".join(sorted(self.subcommands)) sys.exit("Please supply at least one subcommand: %s" % subcmds) main = ServerExtensionApp.launch_instance if __name__ == "__main__": main() jupyter_server-1.13.1/jupyter_server/extension/utils.py000066400000000000000000000063561415445537200235270ustar00rootroot00000000000000import importlib import warnings class ExtensionLoadingError(Exception): pass class ExtensionMetadataError(Exception): pass class ExtensionModuleNotFound(Exception): pass class NotAnExtensionApp(Exception): pass def get_loader(obj, logger=None): """Looks for _load_jupyter_server_extension as an attribute of the object or module. Adds backwards compatibility for old function name missing the underscore prefix. """ try: func = getattr(obj, "_load_jupyter_server_extension") except AttributeError: func = getattr(obj, "load_jupyter_server_extension") warnings.warn( "A `_load_jupyter_server_extension` function was not " "found in {name!s}. Instead, a `load_jupyter_server_extension` " "function was found and will be used for now. 
This function " "name will be deprecated in future releases " "of Jupyter Server.".format(name=obj), DeprecationWarning, ) except Exception: raise ExtensionLoadingError("_load_jupyter_server_extension function was not found.") return func def get_metadata(package_name, logger=None): """Find the extension metadata from an extension package. This looks for a `_jupyter_server_extension_points` function that returns metadata about all extension points within a Jupyter Server Extension package. If it doesn't exist, return a basic metadata packet given the module name. """ module = importlib.import_module(package_name) try: return module, module._jupyter_server_extension_points() except AttributeError: pass # For backwards compatibility, we temporarily allow # _jupyter_server_extension_paths. We will remove in # a later release of Jupyter Server. try: extension_points = module._jupyter_server_extension_paths() if logger: logger.warning( "A `_jupyter_server_extension_points` function was not " "found in {name}. Instead, a `_jupyter_server_extension_paths` " "function was found and will be used for now. This function " "name will be deprecated in future releases " "of Jupyter Server.".format(name=package_name) ) return module, extension_points except AttributeError: pass # Dynamically create metadata if the package doesn't # provide it. if logger: logger.debug( "A `_jupyter_server_extension_points` function was " "not found in {name}, so Jupyter Server will look " "for extension points in the extension package's " "root.".format(name=package_name) ) return module, [{"module": package_name, "name": package_name}] def validate_extension(name): """Raises an exception if the extension is missing a needed hook or metadata field. An extension is valid if: 1) name is an importable Python package. 2) the package has a _jupyter_server_extension_paths function 3) each extension path has a _load_jupyter_server_extension function If this works, nothing should happen. """ from .manager import ExtensionPackage return ExtensionPackage(name=name) jupyter_server-1.13.1/jupyter_server/files/000077500000000000000000000000001415445537200210715ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/files/__init__.py000066400000000000000000000000001415445537200231700ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/files/handlers.py000066400000000000000000000054561415445537200232550ustar00rootroot00000000000000"""Serve files directly from the ContentsManager.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import json import mimetypes from base64 import decodebytes from tornado import web from jupyter_server.base.handlers import JupyterHandler from jupyter_server.utils import ensure_async class FilesHandler(JupyterHandler): """serve files via ContentsManager Normally used when ContentsManager is not a FileContentsManager. FileContentsManager subclasses use AuthenticatedFilesHandler by default, a subclass of StaticFileHandler. """ @property def content_security_policy(self): # In case we're serving HTML/SVG, confine any Javascript to a unique # origin so it can't interact with the notebook server.
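# Client-side sketch of this handler (server URL, token and path are
# hypothetical; `requests` is a third-party package): /files/<path> serves
# the raw file through the ContentsManager, and ?download=1 adds an
# attachment Content-Disposition header.
#
#     import requests
#
#     resp = requests.get(
#         "http://localhost:8888/files/data/report.csv",
#         params={"download": 1},
#         headers={"Authorization": "token abc123"},
#     )
#     resp.headers.get("Content-Type")  # e.g. "text/csv"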
return super(FilesHandler, self).content_security_policy + "; sandbox allow-scripts" @web.authenticated def head(self, path): self.check_xsrf_cookie() return self.get(path, include_body=False) @web.authenticated async def get(self, path, include_body=True): # /files/ requests must originate from the same site self.check_xsrf_cookie() cm = self.contents_manager if await ensure_async(cm.is_hidden(path)) and not cm.allow_hidden: self.log.info("Refusing to serve hidden file, via 404 Error") raise web.HTTPError(404) path = path.strip("/") if "/" in path: _, name = path.rsplit("/", 1) else: name = path model = await ensure_async(cm.get(path, type="file", content=include_body)) if self.get_argument("download", False): self.set_attachment_header(name) # get mimetype from filename if name.lower().endswith(".ipynb"): self.set_header("Content-Type", "application/x-ipynb+json") else: cur_mime = mimetypes.guess_type(name)[0] if cur_mime == "text/plain": self.set_header("Content-Type", "text/plain; charset=UTF-8") elif cur_mime is not None: self.set_header("Content-Type", cur_mime) else: if model["format"] == "base64": self.set_header("Content-Type", "application/octet-stream") else: self.set_header("Content-Type", "text/plain; charset=UTF-8") if include_body: if model["format"] == "base64": b64_bytes = model["content"].encode("ascii") self.write(decodebytes(b64_bytes)) elif model["format"] == "json": self.write(json.dumps(model["content"])) else: self.write(model["content"]) self.flush() default_handlers = [] jupyter_server-1.13.1/jupyter_server/gateway/000077500000000000000000000000001415445537200214305ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/gateway/__init__.py000066400000000000000000000000001415445537200235270ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/gateway/gateway_client.py000066400000000000000000000400011415445537200247760ustar00rootroot00000000000000# Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import json import os from socket import gaierror from tornado import web from tornado.httpclient import AsyncHTTPClient from tornado.httpclient import HTTPError from traitlets import Bool from traitlets import default from traitlets import Float from traitlets import Int from traitlets import TraitError from traitlets import Unicode from traitlets import validate from traitlets.config import SingletonConfigurable class GatewayClient(SingletonConfigurable): """This class manages the configuration. It's its own singleton class so that we can share these values across all objects. It also contains some helper methods to build request arguments out of the various config options. """ url = Unicode( default_value=None, allow_none=True, config=True, help="""The url of the Kernel or Enterprise Gateway server where kernel specifications are defined and kernel management takes place. If defined, this Notebook server acts as a proxy for all kernel management and kernel specification retrieval.
(JUPYTER_GATEWAY_URL env var) """, ) url_env = "JUPYTER_GATEWAY_URL" @default("url") def _url_default(self): return os.environ.get(self.url_env) @validate("url") def _url_validate(self, proposal): value = proposal["value"] # Ensure value, if present, starts with 'http' if value is not None and len(value) > 0: if not str(value).lower().startswith("http"): raise TraitError("GatewayClient url must start with 'http': '%r'" % value) return value ws_url = Unicode( default_value=None, allow_none=True, config=True, help="""The websocket url of the Kernel or Enterprise Gateway server. If not provided, this value will correspond to the value of the Gateway url with 'ws' in place of 'http'. (JUPYTER_GATEWAY_WS_URL env var) """, ) ws_url_env = "JUPYTER_GATEWAY_WS_URL" @default("ws_url") def _ws_url_default(self): default_value = os.environ.get(self.ws_url_env) if default_value is None: if self.gateway_enabled: default_value = self.url.lower().replace("http", "ws") return default_value @validate("ws_url") def _ws_url_validate(self, proposal): value = proposal["value"] # Ensure value, if present, starts with 'ws' if value is not None and len(value) > 0: if not str(value).lower().startswith("ws"): raise TraitError("GatewayClient ws_url must start with 'ws': '%r'" % value) return value kernels_endpoint_default_value = "/api/kernels" kernels_endpoint_env = "JUPYTER_GATEWAY_KERNELS_ENDPOINT" kernels_endpoint = Unicode( default_value=kernels_endpoint_default_value, config=True, help="""The gateway API endpoint for accessing kernel resources (JUPYTER_GATEWAY_KERNELS_ENDPOINT env var)""", ) @default("kernels_endpoint") def _kernels_endpoint_default(self): return os.environ.get(self.kernels_endpoint_env, self.kernels_endpoint_default_value) kernelspecs_endpoint_default_value = "/api/kernelspecs" kernelspecs_endpoint_env = "JUPYTER_GATEWAY_KERNELSPECS_ENDPOINT" kernelspecs_endpoint = Unicode( default_value=kernelspecs_endpoint_default_value, config=True, help="""The gateway API endpoint for accessing kernelspecs (JUPYTER_GATEWAY_KERNELSPECS_ENDPOINT env var)""", ) @default("kernelspecs_endpoint") def _kernelspecs_endpoint_default(self): return os.environ.get( self.kernelspecs_endpoint_env, self.kernelspecs_endpoint_default_value ) kernelspecs_resource_endpoint_default_value = "/kernelspecs" kernelspecs_resource_endpoint_env = "JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT" kernelspecs_resource_endpoint = Unicode( default_value=kernelspecs_resource_endpoint_default_value, config=True, help="""The gateway endpoint for accessing kernelspecs resources (JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT env var)""", ) @default("kernelspecs_resource_endpoint") def _kernelspecs_resource_endpoint_default(self): return os.environ.get( self.kernelspecs_resource_endpoint_env, self.kernelspecs_resource_endpoint_default_value ) connect_timeout_default_value = 40.0 connect_timeout_env = "JUPYTER_GATEWAY_CONNECT_TIMEOUT" connect_timeout = Float( default_value=connect_timeout_default_value, config=True, help="""The time allowed for HTTP connection establishment with the Gateway server. (JUPYTER_GATEWAY_CONNECT_TIMEOUT env var)""", ) @default("connect_timeout") def connect_timeout_default(self): return float( os.environ.get("JUPYTER_GATEWAY_CONNECT_TIMEOUT", self.connect_timeout_default_value) ) request_timeout_default_value = 40.0 request_timeout_env = "JUPYTER_GATEWAY_REQUEST_TIMEOUT" request_timeout = Float( default_value=request_timeout_default_value, config=True, help="""The time allowed for HTTP request completion. 
(JUPYTER_GATEWAY_REQUEST_TIMEOUT env var)""", ) @default("request_timeout") def request_timeout_default(self): return float( os.environ.get("JUPYTER_GATEWAY_REQUEST_TIMEOUT", self.request_timeout_default_value) ) client_key = Unicode( default_value=None, allow_none=True, config=True, help="""The filename for client SSL key, if any. (JUPYTER_GATEWAY_CLIENT_KEY env var) """, ) client_key_env = "JUPYTER_GATEWAY_CLIENT_KEY" @default("client_key") def _client_key_default(self): return os.environ.get(self.client_key_env) client_cert = Unicode( default_value=None, allow_none=True, config=True, help="""The filename for client SSL certificate, if any. (JUPYTER_GATEWAY_CLIENT_CERT env var) """, ) client_cert_env = "JUPYTER_GATEWAY_CLIENT_CERT" @default("client_cert") def _client_cert_default(self): return os.environ.get(self.client_cert_env) ca_certs = Unicode( default_value=None, allow_none=True, config=True, help="""The filename of CA certificates or None to use defaults. (JUPYTER_GATEWAY_CA_CERTS env var) """, ) ca_certs_env = "JUPYTER_GATEWAY_CA_CERTS" @default("ca_certs") def _ca_certs_default(self): return os.environ.get(self.ca_certs_env) http_user = Unicode( default_value=None, allow_none=True, config=True, help="""The username for HTTP authentication. (JUPYTER_GATEWAY_HTTP_USER env var) """, ) http_user_env = "JUPYTER_GATEWAY_HTTP_USER" @default("http_user") def _http_user_default(self): return os.environ.get(self.http_user_env) http_pwd = Unicode( default_value=None, allow_none=True, config=True, help="""The password for HTTP authentication. (JUPYTER_GATEWAY_HTTP_PWD env var) """, ) http_pwd_env = "JUPYTER_GATEWAY_HTTP_PWD" @default("http_pwd") def _http_pwd_default(self): return os.environ.get(self.http_pwd_env) headers_default_value = "{}" headers_env = "JUPYTER_GATEWAY_HEADERS" headers = Unicode( default_value=headers_default_value, allow_none=True, config=True, help="""Additional HTTP headers to pass on the request. This value will be converted to a dict. (JUPYTER_GATEWAY_HEADERS env var) """, ) @default("headers") def _headers_default(self): return os.environ.get(self.headers_env, self.headers_default_value) auth_token = Unicode( default_value=None, allow_none=True, config=True, help="""The authorization token used in the HTTP headers. The header will be formatted as: { 'Authorization': '{auth_scheme} {auth_token}' } (JUPYTER_GATEWAY_AUTH_TOKEN env var)""", ) auth_token_env = "JUPYTER_GATEWAY_AUTH_TOKEN" @default("auth_token") def _auth_token_default(self): return os.environ.get(self.auth_token_env, "") auth_scheme = Unicode( default_value=None, allow_none=True, config=True, help="""The auth scheme, added as a prefix to the authorization token used in the HTTP headers. (JUPYTER_GATEWAY_AUTH_SCHEME env var)""", ) auth_scheme_env = "JUPYTER_GATEWAY_AUTH_SCHEME" @default("auth_scheme") def _auth_scheme_default(self): return os.environ.get(self.auth_scheme_env, "token") validate_cert_default_value = True validate_cert_env = "JUPYTER_GATEWAY_VALIDATE_CERT" validate_cert = Bool( default_value=validate_cert_default_value, config=True, help="""For HTTPS requests, determines if server's certificate should be validated or not. 
(JUPYTER_GATEWAY_VALIDATE_CERT env var)""", ) @default("validate_cert") def validate_cert_default(self): return bool( os.environ.get(self.validate_cert_env, str(self.validate_cert_default_value)) not in ["no", "false"] ) def __init__(self, **kwargs): super().__init__(**kwargs) self._static_args = {} # initialized on first use env_whitelist_default_value = "" env_whitelist_env = "JUPYTER_GATEWAY_ENV_WHITELIST" env_whitelist = Unicode( default_value=env_whitelist_default_value, config=True, help="""A comma-separated list of environment variable names that will be included, along with their values, in the kernel startup request. The corresponding `env_whitelist` configuration value must also be set on the Gateway server - since that configuration value indicates which environment variables to make available to the kernel. (JUPYTER_GATEWAY_ENV_WHITELIST env var)""", ) @default("env_whitelist") def _env_whitelist_default(self): return os.environ.get(self.env_whitelist_env, self.env_whitelist_default_value) gateway_retry_interval_default_value = 1.0 gateway_retry_interval_env = "JUPYTER_GATEWAY_RETRY_INTERVAL" gateway_retry_interval = Float( default_value=gateway_retry_interval_default_value, config=True, help="""The time to wait before the first HTTP reconnection attempt to the Gateway server. Each subsequent retry waits twice as long as the previous one, capped at JUPYTER_GATEWAY_RETRY_INTERVAL_MAX. (JUPYTER_GATEWAY_RETRY_INTERVAL env var)""", ) @default("gateway_retry_interval") def gateway_retry_interval_default(self): return float( os.environ.get( "JUPYTER_GATEWAY_RETRY_INTERVAL", self.gateway_retry_interval_default_value ) ) gateway_retry_interval_max_default_value = 30.0 gateway_retry_interval_max_env = "JUPYTER_GATEWAY_RETRY_INTERVAL_MAX" gateway_retry_interval_max = Float( default_value=gateway_retry_interval_max_default_value, config=True, help="""The maximum time allowed for HTTP reconnection retry with the Gateway server. (JUPYTER_GATEWAY_RETRY_INTERVAL_MAX env var)""", ) @default("gateway_retry_interval_max") def gateway_retry_interval_max_default(self): return float( os.environ.get( "JUPYTER_GATEWAY_RETRY_INTERVAL_MAX", self.gateway_retry_interval_max_default_value ) ) gateway_retry_max_default_value = 5 gateway_retry_max_env = "JUPYTER_GATEWAY_RETRY_MAX" gateway_retry_max = Int( default_value=gateway_retry_max_default_value, config=True, help="""The maximum retries allowed for HTTP reconnection with the Gateway server. (JUPYTER_GATEWAY_RETRY_MAX env var)""", ) @default("gateway_retry_max") def gateway_retry_max_default(self): return int( os.environ.get("JUPYTER_GATEWAY_RETRY_MAX", self.gateway_retry_max_default_value) ) @property def gateway_enabled(self): return bool(self.url is not None and len(self.url) > 0) # Ensure KERNEL_LAUNCH_TIMEOUT has a default value. KERNEL_LAUNCH_TIMEOUT = int(os.environ.get("KERNEL_LAUNCH_TIMEOUT", 40)) def init_static_args(self): """Initialize arguments used on every request. Since these are static values, we'll perform this operation once. """ # Ensure that request timeout and KERNEL_LAUNCH_TIMEOUT are the same, taking the # greater value of the two. if self.request_timeout < float(GatewayClient.KERNEL_LAUNCH_TIMEOUT): self.request_timeout = float(GatewayClient.KERNEL_LAUNCH_TIMEOUT) elif self.request_timeout > float(GatewayClient.KERNEL_LAUNCH_TIMEOUT): GatewayClient.KERNEL_LAUNCH_TIMEOUT = int(self.request_timeout) # Ensure any adjustments are reflected in env.
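# Worked example of the retry schedule these traits define: the websocket
# reconnect logic in gateway/handlers.py (later in this package) waits
# min(gateway_retry_interval * 2 ** retries, gateway_retry_interval_max)
# plus a small random jitter between attempts, so with the defaults above:
#
#     base, cap = 1.0, 30.0
#     [min(base * 2 ** n, cap) for n in range(5)]  # -> [1.0, 2.0, 4.0, 8.0, 16.0]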
os.environ["KERNEL_LAUNCH_TIMEOUT"] = str(GatewayClient.KERNEL_LAUNCH_TIMEOUT) self._static_args["headers"] = json.loads(self.headers) if "Authorization" not in self._static_args["headers"].keys(): self._static_args["headers"].update( {"Authorization": "{} {}".format(self.auth_scheme, self.auth_token)} ) self._static_args["connect_timeout"] = self.connect_timeout self._static_args["request_timeout"] = self.request_timeout self._static_args["validate_cert"] = self.validate_cert if self.client_cert: self._static_args["client_cert"] = self.client_cert self._static_args["client_key"] = self.client_key if self.ca_certs: self._static_args["ca_certs"] = self.ca_certs if self.http_user: self._static_args["auth_username"] = self.http_user if self.http_pwd: self._static_args["auth_password"] = self.http_pwd def load_connection_args(self, **kwargs): """Merges the static args relative to the connection, with the given keyword arguments. If statics have yet to be initialized, we'll do that here. """ if len(self._static_args) == 0: self.init_static_args() kwargs.update(self._static_args) return kwargs async def gateway_request(endpoint, **kwargs): """Make an async request to kernel gateway endpoint, returns a response """ client = AsyncHTTPClient() kwargs = GatewayClient.instance().load_connection_args(**kwargs) try: response = await client.fetch(endpoint, **kwargs) # Trap a set of common exceptions so that we can inform the user that their Gateway url is incorrect # or the server is not running. # NOTE: We do this here since this handler is called during the Notebook's startup and subsequent refreshes # of the tree view. except ConnectionRefusedError as e: raise web.HTTPError( 503, "Connection refused from Gateway server url '{}'. " "Check to be sure the Gateway instance is running.".format( GatewayClient.instance().url ), ) from e except HTTPError as e: # This can occur if the host is valid (e.g., foo.com) but there's nothing there. raise web.HTTPError( e.code, "Error attempting to connect to Gateway server url '{}'. " "Ensure gateway url is valid and the Gateway instance is running.".format( GatewayClient.instance().url ), ) from e except gaierror as e: raise web.HTTPError( 404, "The Gateway server specified in the gateway_url '{}' doesn't appear to be valid. " "Ensure gateway url is valid and the Gateway instance is running.".format( GatewayClient.instance().url ), ) from e return response jupyter_server-1.13.1/jupyter_server/gateway/handlers.py000066400000000000000000000257271415445537200236170ustar00rootroot00000000000000# Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
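# For orientation: the handlers in this module proxy the notebook client's
# kernel websocket to a Gateway server. The upstream URL built by
# GatewayWebSocketClient._connect below is composed like this (host and
# kernel id are hypothetical):
from jupyter_server.utils import url_path_join

example_ws_url = url_path_join("ws://gateway:8888", "/api/kernels", "abc-123", "channels")
# -> "ws://gateway:8888/api/kernels/abc-123/channels"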
import asyncio import logging import mimetypes import os import random from ipython_genutils.py3compat import cast_unicode from jupyter_client.session import Session from tornado import web from tornado.concurrent import Future from tornado.escape import json_decode from tornado.escape import url_escape from tornado.escape import utf8 from tornado.httpclient import HTTPRequest from tornado.ioloop import IOLoop from tornado.ioloop import PeriodicCallback from tornado.websocket import websocket_connect from tornado.websocket import WebSocketHandler from traitlets.config.configurable import LoggingConfigurable from ..base.handlers import APIHandler from ..base.handlers import JupyterHandler from ..utils import url_path_join from .managers import GatewayClient # Keepalive ping interval (default: 30 seconds) GATEWAY_WS_PING_INTERVAL_SECS = int(os.getenv("GATEWAY_WS_PING_INTERVAL_SECS", 30)) class WebSocketChannelsHandler(WebSocketHandler, JupyterHandler): session = None gateway = None kernel_id = None ping_callback = None def check_origin(self, origin=None): return JupyterHandler.check_origin(self, origin) def set_default_headers(self): """Undo the set_default_headers in JupyterHandler which doesn't make sense for websockets""" pass def get_compression_options(self): # use deflate compress websocket return {} def authenticate(self): """Run before finishing the GET request Extend this method to add logic that should fire before the websocket finishes completing. """ # authenticate the request before opening the websocket if self.get_current_user() is None: self.log.warning("Couldn't authenticate WebSocket connection") raise web.HTTPError(403) if self.get_argument("session_id", False): self.session.session = cast_unicode(self.get_argument("session_id")) else: self.log.warning("No session ID specified") def initialize(self): self.log.debug("Initializing websocket connection %s", self.request.path) self.session = Session(config=self.config) self.gateway = GatewayWebSocketClient(gateway_url=GatewayClient.instance().url) async def get(self, kernel_id, *args, **kwargs): self.authenticate() self.kernel_id = cast_unicode(kernel_id, "ascii") await super(WebSocketChannelsHandler, self).get(kernel_id=kernel_id, *args, **kwargs) def send_ping(self): if self.ws_connection is None and self.ping_callback is not None: self.ping_callback.stop() return self.ping(b"") def open(self, kernel_id, *args, **kwargs): """Handle web socket connection open to notebook server and delegate to gateway web socket handler """ self.ping_callback = PeriodicCallback(self.send_ping, GATEWAY_WS_PING_INTERVAL_SECS * 1000) self.ping_callback.start() self.gateway.on_open( kernel_id=kernel_id, message_callback=self.write_message, compression_options=self.get_compression_options(), ) def on_message(self, message): """Forward message to gateway web socket handler.""" self.gateway.on_message(message) def write_message(self, message, binary=False): """Send message back to notebook client. 
This is called via callback from self.gateway._read_messages.""" if self.ws_connection: # prevent WebSocketClosedError if isinstance(message, bytes): binary = True super(WebSocketChannelsHandler, self).write_message(message, binary=binary) elif self.log.isEnabledFor(logging.DEBUG): msg_summary = WebSocketChannelsHandler._get_message_summary(json_decode(utf8(message))) self.log.debug( "Notebook client closed websocket connection - message dropped: {}".format( msg_summary ) ) def on_close(self): self.log.debug("Closing websocket connection %s", self.request.path) self.gateway.on_close() super(WebSocketChannelsHandler, self).on_close() @staticmethod def _get_message_summary(message): summary = [] message_type = message["msg_type"] summary.append("type: {}".format(message_type)) if message_type == "status": summary.append(", state: {}".format(message["content"]["execution_state"])) elif message_type == "error": summary.append( ", {}:{}:{}".format( message["content"]["ename"], message["content"]["evalue"], message["content"]["traceback"], ) ) else: summary.append(", ...") # don't display potentially sensitive data return "".join(summary) class GatewayWebSocketClient(LoggingConfigurable): """Proxy web socket connection to a kernel/enterprise gateway.""" def __init__(self, **kwargs): super(GatewayWebSocketClient, self).__init__(**kwargs) self.kernel_id = None self.ws = None self.ws_future = Future() self.disconnected = False self.retry = 0 async def _connect(self, kernel_id, message_callback): # websocket is initialized before connection self.ws = None self.kernel_id = kernel_id ws_url = url_path_join( GatewayClient.instance().ws_url, GatewayClient.instance().kernels_endpoint, url_escape(kernel_id), "channels", ) self.log.info("Connecting to {}".format(ws_url)) kwargs = {} kwargs = GatewayClient.instance().load_connection_args(**kwargs) request = HTTPRequest(ws_url, **kwargs) self.ws_future = websocket_connect(request) self.ws_future.add_done_callback(self._connection_done) loop = IOLoop.current() loop.add_future(self.ws_future, lambda future: self._read_messages(message_callback)) def _connection_done(self, fut): if ( not self.disconnected and fut.exception() is None ): # prevent concurrent.futures._base.CancelledError self.ws = fut.result() self.retry = 0 self.log.debug("Connection is ready: ws: {}".format(self.ws)) else: self.log.warning( "Websocket connection has been closed via client disconnect or due to error. " "Kernel with ID '{}' may not be terminated on GatewayClient: {}".format( self.kernel_id, GatewayClient.instance().url ) ) def _disconnect(self): self.disconnected = True if self.ws is not None: # Close connection self.ws.close() elif not self.ws_future.done(): # Cancel pending connection. 
# Since future.cancel() is a noop on tornado, we'll track cancellation locally.
            self.ws_future.cancel()
            self.log.debug(
                "_disconnect: future cancelled, disconnected: {}".format(self.disconnected)
            )

    async def _read_messages(self, callback):
        """Read messages from gateway server."""
        while self.ws is not None:
            message = None
            if not self.disconnected:
                try:
                    message = await self.ws.read_message()
                except Exception as e:
                    self.log.error(
                        "Exception reading message from websocket: {}".format(e)
                    )  # , exc_info=True)
                if message is None:
                    if not self.disconnected:
                        self.log.warning("Lost connection to Gateway: {}".format(self.kernel_id))
                    break
                callback(
                    message
                )  # pass back to notebook client (see self.on_open and WebSocketChannelsHandler.open)
            else:  # ws cancelled - stop reading
                break

        # NOTE(esevan): if websocket is not disconnected by client, try to reconnect.
        if not self.disconnected and self.retry < GatewayClient.instance().gateway_retry_max:
            # Exponential backoff capped at gateway_retry_interval_max, plus 0.1-1.0 s of jitter.
            jitter = random.randint(10, 100) * 0.01
            retry_interval = (
                min(
                    GatewayClient.instance().gateway_retry_interval * (2 ** self.retry),
                    GatewayClient.instance().gateway_retry_interval_max,
                )
                + jitter
            )
            self.retry += 1
            self.log.info(
                "Attempting to re-establish the connection to Gateway in %s secs (%s/%s): %s",
                retry_interval,
                self.retry,
                GatewayClient.instance().gateway_retry_max,
                self.kernel_id,
            )
            await asyncio.sleep(retry_interval)
            loop = IOLoop.current()
            loop.spawn_callback(self._connect, self.kernel_id, callback)

    def on_open(self, kernel_id, message_callback, **kwargs):
        """Web socket connection open against gateway server."""
        loop = IOLoop.current()
        loop.spawn_callback(self._connect, kernel_id, message_callback)

    def on_message(self, message):
        """Send message to gateway server."""
        if self.ws is None:
            loop = IOLoop.current()
            loop.add_future(self.ws_future, lambda future: self._write_message(message))
        else:
            self._write_message(message)

    def _write_message(self, message):
        """Send message to gateway server."""
        try:
            if not self.disconnected and self.ws is not None:
                self.ws.write_message(message)
        except Exception as e:
            self.log.error(
                "Exception writing message to websocket: {}".format(e)
            )  # , exc_info=True)

    def on_close(self):
        """Web socket closed event."""
        self._disconnect()


class GatewayResourceHandler(APIHandler):
    """Retrieves resources for specific kernelspec definitions from kernel/enterprise gateway."""

    @web.authenticated
    async def get(self, kernel_name, path, include_body=True):
        ksm = self.kernel_spec_manager
        kernel_spec_res = await ksm.get_kernel_spec_resource(kernel_name, path)
        if kernel_spec_res is None:
            self.log.warning(
                "Kernelspec resource '{}' for '{}' not found.  Gateway may not support"
                " resource serving.".format(path, kernel_name)
            )
        else:
            self.set_header("Content-Type", mimetypes.guess_type(path)[0])
        self.finish(kernel_spec_res)


from ..services.kernels.handlers import _kernel_id_regex
from ..services.kernelspecs.handlers import kernel_name_regex

default_handlers = [
    (r"/api/kernels/%s/channels" % _kernel_id_regex, WebSocketChannelsHandler),
    (r"/kernelspecs/%s/(?P<path>.*)" % kernel_name_regex, GatewayResourceHandler),
]
jupyter_server-1.13.1/jupyter_server/gateway/managers.py
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
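# --- Illustrative sketch (editorial addition, not part of the original module) ---
# handlers.py above reconnects with jittered exponential backoff. The interval it
# computes is equivalent to this standalone helper; the `base` and `cap` defaults
# here are assumptions for illustration, not the actual GatewayClient trait defaults.
import random


def _next_retry_interval(retry: int, base: float = 1.0, cap: float = 30.0) -> float:
    """Delay in seconds before reconnect attempt `retry` (0-based)."""
    jitter = random.randint(10, 100) * 0.01  # uniform 0.1 - 1.0 seconds
    return min(base * (2 ** retry), cap) + jitter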
import datetime import json import os from logging import Logger from queue import Queue from threading import Thread from typing import Dict import websocket from jupyter_client.asynchronous.client import AsyncKernelClient from jupyter_client.clientabc import KernelClientABC from jupyter_client.kernelspec import KernelSpecManager from jupyter_client.manager import AsyncKernelManager from jupyter_client.managerabc import KernelManagerABC from tornado import web from tornado.escape import json_decode from tornado.escape import json_encode from tornado.escape import url_escape from tornado.escape import utf8 from traitlets import default from traitlets import DottedObjectName from traitlets import Instance from traitlets import Type from .._tz import UTC from ..services.kernels.kernelmanager import AsyncMappingKernelManager from ..services.sessions.sessionmanager import SessionManager from ..utils import ensure_async from ..utils import url_path_join from .gateway_client import gateway_request from .gateway_client import GatewayClient class GatewayMappingKernelManager(AsyncMappingKernelManager): """Kernel manager that supports remote kernels hosted by Jupyter Kernel or Enterprise Gateway.""" # We'll maintain our own set of kernel ids _kernels: Dict[str, "GatewayKernelManager"] = {} @default("kernel_manager_class") def _default_kernel_manager_class(self): return "jupyter_server.gateway.managers.GatewayKernelManager" @default("shared_context") def _default_shared_context(self): return False # no need to share zmq contexts def __init__(self, **kwargs): super().__init__(**kwargs) self.kernels_url = url_path_join( GatewayClient.instance().url, GatewayClient.instance().kernels_endpoint ) def remove_kernel(self, kernel_id): """Complete override since we want to be more tolerant of missing keys """ try: return self._kernels.pop(kernel_id) except KeyError: pass async def start_kernel(self, kernel_id=None, path=None, **kwargs): """Start a kernel for a session and return its kernel_id. Parameters ---------- kernel_id : uuid The uuid to associate the new kernel with. If this is not None, this kernel will be persistent whenever it is requested. path : API path The API path (unicode, '/' delimited) for the cwd. Will be transformed to an OS path relative to root_dir. """ self.log.info(f"Request start kernel: kernel_id={kernel_id}, path='{path}'") if kernel_id is None: if path is not None: kwargs["cwd"] = self.cwd_for_path(path) km = self.kernel_manager_factory(parent=self, log=self.log) await km.start_kernel(**kwargs) kernel_id = km.kernel_id self._kernels[kernel_id] = km # Initialize culling if not already if not self._initialized_culler: self.initialize_culler() return kernel_id async def kernel_model(self, kernel_id): """Return a dictionary of kernel information described in the JSON standard model. Parameters ---------- kernel_id : uuid The uuid of the kernel. """ model = None km = self.get_kernel(kernel_id) if km: model = km.kernel return model async def list_kernels(self, **kwargs): """Get a list of running kernels from the Gateway server. We'll use this opportunity to refresh the models in each of the kernels we're managing. """ self.log.debug(f"Request list kernels: {self.kernels_url}") response = await gateway_request(self.kernels_url, method="GET") kernels = json_decode(response.body) # Refresh our models to those we know about, and filter # the return value with only our kernels. 
kernel_models = {}
        for model in kernels:
            kid = model["id"]
            if kid in self._kernels:
                await self._kernels[kid].refresh_model(model)
                kernel_models[kid] = model

        # Remove any of our kernels that may have been culled on the gateway server
        our_kernels = self._kernels.copy()
        culled_ids = []
        for kid, km in our_kernels.items():
            if kid not in kernel_models:
                self.log.warning(
                    f"Kernel {kid} no longer active - probably culled on Gateway server."
                )
                self._kernels.pop(kid, None)
                culled_ids.append(kid)  # TODO: Figure out what to do with these.

        return list(kernel_models.values())

    async def shutdown_kernel(self, kernel_id, now=False, restart=False):
        """Shutdown a kernel by its kernel uuid.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel to shutdown.
        now : bool
            Shutdown the kernel immediately (True) or gracefully (False)
        restart : bool
            The purpose of this shutdown is to restart the kernel (True)
        """
        km = self.get_kernel(kernel_id)
        await km.shutdown_kernel(now=now, restart=restart)
        self.remove_kernel(kernel_id)

    async def restart_kernel(self, kernel_id, now=False, **kwargs):
        """Restart a kernel by its kernel uuid.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel to restart.
        """
        km = self.get_kernel(kernel_id)
        await km.restart_kernel(now=now, **kwargs)

    async def interrupt_kernel(self, kernel_id, **kwargs):
        """Interrupt a kernel by its kernel uuid.

        Parameters
        ==========
        kernel_id : uuid
            The id of the kernel to interrupt.
        """
        km = self.get_kernel(kernel_id)
        await km.interrupt_kernel()

    async def shutdown_all(self, now=False):
        """Shutdown all kernels."""
        # Iterate over a copy: remove_kernel() mutates self._kernels during the loop.
        for kernel_id in list(self._kernels):
            km = self.get_kernel(kernel_id)
            await km.shutdown_kernel(now=now)
            self.remove_kernel(kernel_id)

    async def cull_kernels(self):
        """Override cull_kernels so we can be sure their state is current."""
        await self.list_kernels()
        await super().cull_kernels()


class GatewayKernelSpecManager(KernelSpecManager):
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        base_endpoint = url_path_join(
            GatewayClient.instance().url, GatewayClient.instance().kernelspecs_endpoint
        )

        self.base_endpoint = GatewayKernelSpecManager._get_endpoint_for_user_filter(base_endpoint)
        self.base_resource_endpoint = url_path_join(
            GatewayClient.instance().url, GatewayClient.instance().kernelspecs_resource_endpoint
        )

    @staticmethod
    def _get_endpoint_for_user_filter(default_endpoint):
        kernel_user = os.environ.get("KERNEL_USERNAME")
        if kernel_user:
            return "?user=".join([default_endpoint, kernel_user])
        return default_endpoint

    def _get_kernelspecs_endpoint_url(self, kernel_name=None):
        """Builds a url for the kernels endpoint

        Parameters
        ----------
        kernel_name : kernel name (optional)
        """
        if kernel_name:
            return url_path_join(self.base_endpoint, url_escape(kernel_name))

        return self.base_endpoint

    async def get_all_specs(self):
        fetched_kspecs = await self.list_kernel_specs()

        # get the default kernel name and compare to that of this server.
        # If different, log a warning and reset the default.  However, the
        # caller of this method will still return this server's value until
        # the next fetch of kernelspecs - at which time they'll match.
        km = self.parent.kernel_manager
        remote_default_kernel_name = fetched_kspecs.get("default")
        if remote_default_kernel_name != km.default_kernel_name:
            self.log.info(
                f"Default kernel name on Gateway server ({remote_default_kernel_name}) differs from "
                f"Notebook server ({km.default_kernel_name}). Updating to Gateway server's value."
            )
            km.default_kernel_name = remote_default_kernel_name

        remote_kspecs = fetched_kspecs.get("kernelspecs")
        return remote_kspecs

    async def list_kernel_specs(self):
        """Get a list of kernel specs."""
        kernel_spec_url = self._get_kernelspecs_endpoint_url()
        self.log.debug(f"Request list kernel specs at: {kernel_spec_url}")
        response = await gateway_request(kernel_spec_url, method="GET")
        kernel_specs = json_decode(response.body)
        return kernel_specs

    async def get_kernel_spec(self, kernel_name, **kwargs):
        """Get kernel spec for kernel_name.

        Parameters
        ----------
        kernel_name : str
            The name of the kernel.
        """
        kernel_spec_url = self._get_kernelspecs_endpoint_url(kernel_name=str(kernel_name))
        self.log.debug(f"Request kernel spec at: {kernel_spec_url}")
        try:
            response = await gateway_request(kernel_spec_url, method="GET")
        except web.HTTPError as error:
            if error.status_code == 404:
                # Convert not found to KeyError since that's what the Notebook handler expects.
                # The message is not used, but might as well make it useful for troubleshooting.
                raise KeyError(
                    "kernelspec {kernel_name} not found on Gateway server at: {gateway_url}".format(
                        kernel_name=kernel_name, gateway_url=GatewayClient.instance().url
                    )
                ) from error
            else:
                raise
        else:
            kernel_spec = json_decode(response.body)

        return kernel_spec

    async def get_kernel_spec_resource(self, kernel_name, path):
        """Get a kernel spec resource for kernel_name.

        Parameters
        ----------
        kernel_name : str
            The name of the kernel.
        path : str
            The name of the desired resource
        """
        kernel_spec_resource_url = url_path_join(
            self.base_resource_endpoint, str(kernel_name), str(path)
        )
        self.log.debug(f"Request kernel spec resource '{path}' at: {kernel_spec_resource_url}")
        try:
            response = await gateway_request(kernel_spec_resource_url, method="GET")
        except web.HTTPError as error:
            if error.status_code == 404:
                kernel_spec_resource = None
            else:
                raise
        else:
            kernel_spec_resource = response.body
        return kernel_spec_resource


class GatewaySessionManager(SessionManager):
    kernel_manager = Instance("jupyter_server.gateway.managers.GatewayMappingKernelManager")

    async def kernel_culled(self, kernel_id):
        """Checks if the kernel is still considered alive; returns true if it's not found."""
        kernel = None
        try:
            km = self.kernel_manager.get_kernel(kernel_id)
            kernel = await km.refresh_model()
        except Exception:  # Let exceptions here reflect a culled kernel
            pass
        return kernel is None


"""KernelManager class to manage a kernel running on a Gateway Server via the REST API"""


class GatewayKernelManager(AsyncKernelManager):
    """Manages a single kernel remotely via a Gateway Server."""
""" kernel_id = None kernel = None @default("cache_ports") def _default_cache_ports(self): return False # no need to cache ports here def __init__(self, **kwargs): super().__init__(**kwargs) self.kernels_url = url_path_join( GatewayClient.instance().url, GatewayClient.instance().kernels_endpoint ) self.kernel_url = self.kernel = self.kernel_id = None # simulate busy/activity markers: self.execution_state = self.last_activity = None @property def has_kernel(self): """Has a kernel been started that we are managing.""" return self.kernel is not None client_class = DottedObjectName("jupyter_server.gateway.managers.GatewayKernelClient") client_factory = Type(klass="jupyter_server.gateway.managers.GatewayKernelClient") # -------------------------------------------------------------------------- # create a Client connected to our Kernel # -------------------------------------------------------------------------- def client(self, **kwargs): """Create a client configured to connect to our kernel""" kw = {} kw.update(self.get_connection_info(session=True)) kw.update( dict( connection_file=self.connection_file, parent=self, ) ) kw["kernel_id"] = self.kernel_id # add kwargs last, for manual overrides kw.update(kwargs) return self.client_factory(**kw) async def refresh_model(self, model=None): """Refresh the kernel model. Parameters ---------- model : dict The model from which to refresh the kernel. If None, the kernel model is fetched from the Gateway server. """ if model is None: self.log.debug("Request kernel at: %s" % self.kernel_url) try: response = await gateway_request(self.kernel_url, method="GET") except web.HTTPError as error: if error.status_code == 404: self.log.warning("Kernel not found at: %s" % self.kernel_url) model = None else: raise else: model = json_decode(response.body) self.log.debug("Kernel retrieved: %s" % model) if model: # Update activity markers self.last_activity = datetime.datetime.strptime( model["last_activity"], "%Y-%m-%dT%H:%M:%S.%fZ" ).replace(tzinfo=UTC) self.execution_state = model["execution_state"] if isinstance(self.parent, AsyncMappingKernelManager): # Update connections only if there's a mapping kernel manager parent for # this kernel manager. The current kernel manager instance may not have # an parent instance if, say, a server extension is using another application # (e.g., papermill) that uses a KernelManager instance directly. self.parent._kernel_connections[self.kernel_id] = int(model["connections"]) self.kernel = model return model # -------------------------------------------------------------------------- # Kernel management # -------------------------------------------------------------------------- async def start_kernel(self, **kwargs): """Starts a kernel via HTTP in an asynchronous manner. Parameters ---------- `**kwargs` : optional keyword arguments that are passed down to build the kernel_cmd and launching the kernel (e.g. Popen kwargs). """ kernel_id = kwargs.get("kernel_id") if kernel_id is None: kernel_name = kwargs.get("kernel_name", "python3") self.log.debug("Request new kernel at: %s" % self.kernels_url) # Let KERNEL_USERNAME take precedent over http_user config option. 
if os.environ.get("KERNEL_USERNAME") is None and GatewayClient.instance().http_user: os.environ["KERNEL_USERNAME"] = GatewayClient.instance().http_user kernel_env = { k: v for (k, v) in dict(os.environ).items() if k.startswith("KERNEL_") or k in GatewayClient.instance().env_whitelist.split(",") } # Add any env entries in this request kernel_env.update(kwargs.get("env", {})) # Convey the full path to where this notebook file is located. if kwargs.get("cwd") is not None and kernel_env.get("KERNEL_WORKING_DIR") is None: kernel_env["KERNEL_WORKING_DIR"] = kwargs["cwd"] json_body = json_encode({"name": kernel_name, "env": kernel_env}) response = await gateway_request(self.kernels_url, method="POST", body=json_body) self.kernel = json_decode(response.body) self.kernel_id = self.kernel["id"] self.log.info( "GatewayKernelManager started kernel: {}, args: {}".format(self.kernel_id, kwargs) ) else: self.kernel_id = kernel_id self.kernel = await self.refresh_model() self.log.info("GatewayKernelManager using existing kernel: {}".format(self.kernel_id)) self.kernel_url = url_path_join(self.kernels_url, url_escape(str(self.kernel_id))) async def shutdown_kernel(self, now=False, restart=False): """Attempts to stop the kernel process cleanly via HTTP. """ if self.has_kernel: self.log.debug("Request shutdown kernel at: %s", self.kernel_url) response = await gateway_request(self.kernel_url, method="DELETE") self.log.debug("Shutdown kernel response: %d %s", response.code, response.reason) async def restart_kernel(self, **kw): """Restarts a kernel via HTTP. """ if self.has_kernel: kernel_url = self.kernel_url + "/restart" self.log.debug("Request restart kernel at: %s", kernel_url) response = await gateway_request(kernel_url, method="POST", body=json_encode({})) self.log.debug("Restart kernel response: %d %s", response.code, response.reason) async def interrupt_kernel(self): """Interrupts the kernel via an HTTP request. """ if self.has_kernel: kernel_url = self.kernel_url + "/interrupt" self.log.debug("Request interrupt kernel at: %s", kernel_url) response = await gateway_request(kernel_url, method="POST", body=json_encode({})) self.log.debug("Interrupt kernel response: %d %s", response.code, response.reason) async def is_alive(self): """Is the kernel process still running?""" if self.has_kernel: # Go ahead and issue a request to get the kernel self.kernel = await self.refresh_model() return True else: # we don't have a kernel return False def cleanup_resources(self, restart=False): """Clean up resources when the kernel is shut down""" pass KernelManagerABC.register(GatewayKernelManager) class ChannelQueue(Queue): channel_name: str = None def __init__(self, channel_name: str, channel_socket: websocket.WebSocket, log: Logger): super().__init__() self.channel_name = channel_name self.channel_socket = channel_socket self.log = log async def get_msg(self, *args, **kwargs) -> dict: timeout = kwargs.get("timeout", 1) msg = self.get(timeout=timeout) self.log.debug( "Received message on channel: {}, msg_id: {}, msg_type: {}".format( self.channel_name, msg["msg_id"], msg["msg_type"] if msg else "null" ) ) self.task_done() return msg def send(self, msg: dict) -> None: message = json.dumps(msg, default=ChannelQueue.serialize_datetime).replace(" None: pass def stop(self) -> None: if not self.empty(): # If unprocessed messages are detected, drain the queue collecting non-status # messages. If any remain that are not 'shutdown_reply' and this is not iopub # go ahead and issue a warning. 
msgs = [] while self.qsize(): msg = self.get_nowait() if msg["msg_type"] != "status": msgs.append(msg["msg_type"]) if self.channel_name == "iopub" and "shutdown_reply" in msgs: return if len(msgs): self.log.warning( "Stopping channel '{}' with {} unprocessed non-status messages: {}.".format( self.channel_name, len(msgs), msgs ) ) def is_alive(self) -> bool: return self.channel_socket is not None class HBChannelQueue(ChannelQueue): def is_beating(self) -> bool: # Just use the is_alive status for now return self.is_alive() class GatewayKernelClient(AsyncKernelClient): """Communicates with a single kernel indirectly via a websocket to a gateway server. There are five channels associated with each kernel: * shell: for request/reply calls to the kernel. * iopub: for the kernel to publish results to frontends. * hb: for monitoring the kernel's heartbeat. * stdin: for frontends to reply to raw_input calls in the kernel. * control: for kernel management calls to the kernel. The messages that can be sent on these channels are exposed as methods of the client (KernelClient.execute, complete, history, etc.). These methods only send the message, they don't wait for a reply. To get results, use e.g. :meth:`get_shell_msg` to fetch messages from the shell channel. """ # flag for whether execute requests should be allowed to call raw_input: allow_stdin = False _channels_stopped = False _channel_queues = {} def __init__(self, **kwargs): super().__init__(**kwargs) self.kernel_id = kwargs["kernel_id"] self.channel_socket = None self.response_router = None # -------------------------------------------------------------------------- # Channel management methods # -------------------------------------------------------------------------- async def start_channels(self, shell=True, iopub=True, stdin=True, hb=True, control=True): """Starts the channels for this kernel. For this class, we establish a websocket connection to the destination and setup the channel-based queues on which applicable messages will be posted. """ ws_url = url_path_join( GatewayClient.instance().ws_url, GatewayClient.instance().kernels_endpoint, url_escape(self.kernel_id), "channels", ) # Gather cert info in case where ssl is desired... ssl_options = dict() ssl_options["ca_certs"] = GatewayClient.instance().ca_certs ssl_options["certfile"] = GatewayClient.instance().client_cert ssl_options["keyfile"] = GatewayClient.instance().client_key self.channel_socket = websocket.create_connection( ws_url, timeout=GatewayClient.instance().KERNEL_LAUNCH_TIMEOUT, enable_multithread=True, sslopt=ssl_options, ) self.response_router = Thread(target=self._route_responses) self.response_router.start() await ensure_async( super().start_channels(shell=shell, iopub=iopub, stdin=stdin, hb=hb, control=control) ) def stop_channels(self): """Stops all the running channels for this kernel. For this class, we close the websocket connection and destroy the channel-based queues. 
""" super().stop_channels() self._channels_stopped = True self.log.debug("Closing websocket connection") self.channel_socket.close() self.response_router.join() if self._channel_queues: self._channel_queues.clear() self._channel_queues = None # Channels are implemented via a ChannelQueue that is used to send and receive messages @property def shell_channel(self): """Get the shell channel object for this kernel.""" if self._shell_channel is None: self.log.debug("creating shell channel queue") self._shell_channel = ChannelQueue("shell", self.channel_socket, self.log) self._channel_queues["shell"] = self._shell_channel return self._shell_channel @property def iopub_channel(self): """Get the iopub channel object for this kernel.""" if self._iopub_channel is None: self.log.debug("creating iopub channel queue") self._iopub_channel = ChannelQueue("iopub", self.channel_socket, self.log) self._channel_queues["iopub"] = self._iopub_channel return self._iopub_channel @property def stdin_channel(self): """Get the stdin channel object for this kernel.""" if self._stdin_channel is None: self.log.debug("creating stdin channel queue") self._stdin_channel = ChannelQueue("stdin", self.channel_socket, self.log) self._channel_queues["stdin"] = self._stdin_channel return self._stdin_channel @property def hb_channel(self): """Get the hb channel object for this kernel.""" if self._hb_channel is None: self.log.debug("creating hb channel queue") self._hb_channel = HBChannelQueue("hb", self.channel_socket, self.log) self._channel_queues["hb"] = self._hb_channel return self._hb_channel @property def control_channel(self): """Get the control channel object for this kernel.""" if self._control_channel is None: self.log.debug("creating control channel queue") self._control_channel = ChannelQueue("control", self.channel_socket, self.log) self._channel_queues["control"] = self._control_channel return self._control_channel def _route_responses(self): """ Reads responses from the websocket and routes each to the appropriate channel queue based on the message's channel. It does this for the duration of the class's lifetime until the channels are stopped, at which time the socket is closed (unblocking the router) and the thread terminates. If shutdown happens to occur while processing a response (unlikely), termination takes place via the loop control boolean. 
""" try: while not self._channels_stopped: raw_message = self.channel_socket.recv() if not raw_message: break response_message = json_decode(utf8(raw_message)) channel = response_message["channel"] self._channel_queues[channel].put_nowait(response_message) except websocket.WebSocketConnectionClosedException: pass # websocket closure most likely due to shutdown except BaseException as be: if not self._channels_stopped: self.log.warning("Unexpected exception encountered ({})".format(be)) self.log.debug("Response router thread exiting...") KernelClientABC.register(GatewayKernelClient) jupyter_server-1.13.1/jupyter_server/i18n/000077500000000000000000000000001415445537200205465ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/i18n/README.md000066400000000000000000000131431415445537200220270ustar00rootroot00000000000000# Implementation Notes for Internationalization of Jupyter Notebook The implementation of i18n features for jupyter notebook is still a work-in-progress: - User interface strings are (mostly) handled - Console messages are not handled (their usefulness in a translated environment is questionable) - Tooling has to be refined However… ## How the language is selected ? 1. `jupyter notebook` command reads the `LANG` environment variable at startup, (`xx_XX` or just `xx` form, where `xx` is the language code you're wanting to run in). Hint: if running Windows, you can set it in PowerShell with `${Env:LANG} = "xx_XX"`. if running Ubuntu 14, you should set environment variable `LANGUAGE="xx_XX"`. 2. The preferred language for web pages in your browser settings (`xx`) is also used. At the moment, it has to be first in the list. ## Contributing and managing translations ### Requirements - _pybabel_ (could be installed `pip install babel`) - _po2json_ (could be installed with `npm install -g po2json`) **All i18n-related commands are done from the related directory :** cd notebook/i18n/ ### Message extraction The translatable material for notebook is split into 3 `.pot` files, as follows: - _notebook/i18n/notebook.pot_ - Console and startup messages, basically anything that is produced by Python code. - _notebook/i18n/nbui.pot_ - User interface strings, as extracted from the Jinja2 templates in _notebook/templates/\*.html_ - _noteook/i18n/nbjs.pot_ - JavaScript strings and dialogs, which contain much of the visible user interface for Jupyter notebook. To extract the messages from the source code whenever new material is added, use the `pybabel` command: ```shell pybabel extract -F babel_notebook.cfg -o notebook.pot --no-wrap --project Jupyter . pybabel extract -F babel_nbui.cfg -o nbui.pot --no-wrap --project Jupyter . pybabel extract -F babel_nbjs.cfg -o nbjs.pot --no-wrap --project Jupyter . ``` After this is complete you have 3 `.pot` files that you can give to a translator for your favorite language. ### Messages compilation After the source material has been translated, you should have 3 `.po` files with the same base names as the `.pot` files above. Put them in `notebook/i18n/${LANG}/LC_MESSAGES`, where `${LANG}` is the language code for your desired language ( i.e. German = "de", Japanese = "ja", etc. ). _notebook.po_ and _nbui.po_ need to be converted from `.po` to `.mo` format for use at runtime. 
```shell
pybabel compile -D notebook -f -l ${LANG} -i ${LANG}/LC_MESSAGES/notebook.po -o ${LANG}/LC_MESSAGES/notebook.mo
pybabel compile -D nbui -f -l ${LANG} -i ${LANG}/LC_MESSAGES/nbui.po -o ${LANG}/LC_MESSAGES/nbui.mo
```

_nbjs.po_ needs to be converted to JSON for use within the JavaScript code, with _po2json_, as follows:

    po2json -p -F -f jed1.x -d nbjs ${LANG}/LC_MESSAGES/nbjs.po ${LANG}/LC_MESSAGES/nbjs.json

When new languages get added, their language codes should be added to _notebook/i18n/nbjs.json_ under the `supported_languages` element.

### Tips for Jupyter developers

The biggest "mistake" I found while doing i18n enablement was the habit of constructing UI messages from English "piece parts". For example, code like:

```javascript
var msg = "Enter a new " + type + "name:";
```

where `type` is either "file", "directory", or "notebook"... is problematic when doing translations, because the surrounding text may need to vary depending on the inserted word. In this case, you need to switch it and use complete phrases, as follows:

```javascript
var rename_msg = function (type) {
  switch (type) {
    case "file":
      return _("Enter a new file name:");
    case "directory":
      return _("Enter a new directory name:");
    case "notebook":
      return _("Enter a new notebook name:");
    default:
      return _("Enter a new name:");
  }
};
```

Also you need to remember that adding an "s" or "es" to an English word to create the plural form doesn't translate well. Some languages have as many as 5 or 6 different plural forms for differing numbers, so using an API such as ngettext() is necessary in order to handle these cases properly.

### Known issues and future evolutions

1. Right now there are two different places where the desired language is set. At startup time, the Jupyter console's messages pay attention to the setting of the `${LANG}` environment variable as set in the shell at startup time. Unfortunately, this is also the time where the Jinja2 environment is set up, which means that the template stuff will always come from this setting. We really want to be paying attention to the browser's settings for the stuff that happens in the browser, so we need to be able to retrieve this information after the browser is started and somehow communicate this back to Jinja2. So far, I haven't yet figured out how to do this, which means that if the `${LANG}` at startup doesn't match the browser's settings, you could potentially get a mix of languages in the UI (never a good thing).

2. We will need to decide if console messages should be translatable, and enable them if desired.

3. The keyboard shortcut editor was implemented after the i18n work was completed, so that portion does not have translation support at this time.

4. Babel's documentation has instructions on how to integrate messages extraction into your _setup.py_ so that eventually we can just do:

       ./setup.py extract_messages

   I hope to get this working at some point in the near future (a rough sketch of the wiring is shown after this list).

5. The conversions from `.po` to `.mo` probably can and should be done using `setup.py install`.
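For item 4, here is a minimal sketch of what that wiring could look like. It assumes only that Babel is installed (`pip install babel`); the `extract_messages` and `compile_catalog` command classes come from Babel's `babel.messages.frontend` module, and the option values shown in the comments reuse the `babel_notebook.cfg` mapping file from this directory:

```python
# setup.py (sketch only, not part of the current notebook build)
from setuptools import setup
from babel.messages import frontend as babel

setup(
    name="notebook",
    cmdclass={
        # enables: ./setup.py extract_messages -F notebook/i18n/babel_notebook.cfg -o notebook/i18n/notebook.pot
        "extract_messages": babel.extract_messages,
        # enables: ./setup.py compile_catalog -D notebook -d notebook/i18n -l de
        "compile_catalog": babel.compile_catalog,
    },
)
```

The same options can instead live in `setup.cfg` under `[extract_messages]` and `[compile_catalog]` sections, which is the configuration style Babel's documentation recommends.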
Any questions or comments, please let me know @JCEmmons on github (emmo@us.ibm.com).
jupyter_server-1.13.1/jupyter_server/i18n/__init__.py
"""Server functions for loading translations
"""
import errno
import io
import json
import re
from collections import defaultdict
from os.path import dirname
from os.path import join as pjoin

I18N_DIR = dirname(__file__)
# Cache structure:
# {'nbjs': {              # Domain
#     'zh-CN': {          # Language code
#         <msg_id>: <translation>
#         ...
#     }
# }}
TRANSLATIONS_CACHE = {"nbjs": {}}


_accept_lang_re = re.compile(
    r"""
(?P<lang>[a-zA-Z]{1,8}(-[a-zA-Z]{1,8})?)
(\s*;\s*q\s*=\s*
(?P<qvalue>[01](.\d+)?)
)?""",
    re.VERBOSE,
)


def parse_accept_lang_header(accept_lang):
    """Parses the 'Accept-Language' HTTP header.

    Returns a list of language codes in *ascending* order of preference
    (with the most preferred language last).
    """
    by_q = defaultdict(list)
    for part in accept_lang.split(","):
        m = _accept_lang_re.match(part.strip())
        if not m:
            continue
        lang, qvalue = m.group("lang", "qvalue")
        # Browser header format is zh-CN, gettext uses zh_CN
        lang = lang.replace("-", "_")
        if qvalue is None:
            qvalue = 1.0
        else:
            qvalue = float(qvalue)
        if qvalue == 0:
            continue  # 0 means not accepted
        by_q[qvalue].append(lang)

    res = []
    for qvalue, langs in sorted(by_q.items()):
        res.extend(sorted(langs))

    return res


def load(language, domain="nbjs"):
    """Load translations from an nbjs.json file"""
    try:
        f = io.open(pjoin(I18N_DIR, language, "LC_MESSAGES", "nbjs.json"), encoding="utf-8")
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
        return {}

    with f:
        data = json.load(f)
    return data["locale_data"][domain]


def cached_load(language, domain="nbjs"):
    """Load translations for one language, using in-memory cache if available"""
    domain_cache = TRANSLATIONS_CACHE[domain]
    try:
        return domain_cache[language]
    except KeyError:
        data = load(language, domain)
        domain_cache[language] = data
        return data


def combine_translations(accept_language, domain="nbjs"):
    """Combine translations for multiple accepted languages.

    Returns data re-packaged in jed1.x format.
    """
    lang_codes = parse_accept_lang_header(accept_language)
    combined = {}
    for language in lang_codes:
        if language == "en":
            # en is default, all translations are in frontend.
            combined.clear()
        else:
            combined.update(cached_load(language, domain))

    combined[""] = {"domain": "nbjs"}

    return {"domain": domain, "locale_data": {domain: combined}}
jupyter_server-1.13.1/jupyter_server/i18n/babel_nbui.cfg
[jinja2: notebook/templates/**.html]
encoding = utf-8

[extractors]
jinja2 = jinja2.ext:babel_extract
jupyter_server-1.13.1/jupyter_server/i18n/babel_notebook.cfg
[python: notebook/*.py]
[python: notebook/services/contents/*.py]
jupyter_server-1.13.1/jupyter_server/i18n/nbjs.json
{
  "domain": "nbjs",
  "supported_languages": ["zh-CN"],
  "locale_data": {
    "nbjs": {
      "": {
        "domain": "nbjs"
      }
    }
  }
}
jupyter_server-1.13.1/jupyter_server/i18n/nbui.pot
# Translations template for Jupyter.
# Copyright (C) 2017 ORGANIZATION
# This file is distributed under the same license as the Jupyter project.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2017.
# #, fuzzy msgid "" msgstr "" "Project-Id-Version: Jupyter VERSION\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" "POT-Creation-Date: 2017-07-07 12:48-0500\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=utf-8\n" "Content-Transfer-Encoding: 8bit\n" "Generated-By: Babel 2.3.4\n" #: notebook/templates/404.html:3 msgid "You are requesting a page that does not exist!" msgstr "" #: notebook/templates/edit.html:37 msgid "current mode" msgstr "" #: notebook/templates/edit.html:48 notebook/templates/notebook.html:78 msgid "File" msgstr "" #: notebook/templates/edit.html:50 notebook/templates/tree.html:57 msgid "New" msgstr "" #: notebook/templates/edit.html:51 msgid "Save" msgstr "" #: notebook/templates/edit.html:52 notebook/templates/tree.html:36 msgid "Rename" msgstr "" #: notebook/templates/edit.html:53 notebook/templates/tree.html:38 msgid "Download" msgstr "" #: notebook/templates/edit.html:56 notebook/templates/notebook.html:131 #: notebook/templates/tree.html:41 msgid "Edit" msgstr "" #: notebook/templates/edit.html:58 msgid "Find" msgstr "" #: notebook/templates/edit.html:59 msgid "Find & Replace" msgstr "" #: notebook/templates/edit.html:61 msgid "Key Map" msgstr "" #: notebook/templates/edit.html:62 msgid "Default" msgstr "" #: notebook/templates/edit.html:63 msgid "Sublime Text" msgstr "" #: notebook/templates/edit.html:68 notebook/templates/notebook.html:159 #: notebook/templates/tree.html:40 msgid "View" msgstr "" #: notebook/templates/edit.html:70 notebook/templates/notebook.html:162 msgid "Show/Hide the logo and notebook title (above menu bar)" msgstr "" #: notebook/templates/edit.html:71 notebook/templates/notebook.html:163 msgid "Toggle Header" msgstr "" #: notebook/templates/edit.html:72 notebook/templates/notebook.html:171 msgid "Toggle Line Numbers" msgstr "" #: notebook/templates/edit.html:75 msgid "Language" msgstr "" #: notebook/templates/error.html:23 msgid "The error was:" msgstr "" #: notebook/templates/login.html:24 msgid "Password or token:" msgstr "" #: notebook/templates/login.html:26 msgid "Password:" msgstr "" #: notebook/templates/login.html:31 msgid "Log in" msgstr "" #: notebook/templates/login.html:39 msgid "No login available, you shouldn't be seeing this page." msgstr "" #: notebook/templates/logout.html:24 #, python-format msgid "Proceed to the dashboard" msgstr "" #: notebook/templates/logout.html:26 #, python-format msgid "Proceed to the login page" msgstr "" #: notebook/templates/notebook.html:62 msgid "Menu" msgstr "" #: notebook/templates/notebook.html:65 notebook/templates/notebook.html:254 msgid "Kernel" msgstr "" #: notebook/templates/notebook.html:68 msgid "This notebook is read-only" msgstr "" #: notebook/templates/notebook.html:81 msgid "New Notebook" msgstr "" #: notebook/templates/notebook.html:85 msgid "Opens a new window with the Dashboard view" msgstr "" #: notebook/templates/notebook.html:86 msgid "Open..." msgstr "" #: notebook/templates/notebook.html:90 msgid "Open a copy of this notebook's contents and start a new kernel" msgstr "" #: notebook/templates/notebook.html:91 msgid "Make a Copy..." msgstr "" #: notebook/templates/notebook.html:92 msgid "Rename..." 
msgstr "" #: notebook/templates/notebook.html:93 msgid "Save and Checkpoint" msgstr "" #: notebook/templates/notebook.html:96 msgid "Revert to Checkpoint" msgstr "" #: notebook/templates/notebook.html:106 msgid "Print Preview" msgstr "" #: notebook/templates/notebook.html:107 msgid "Download as" msgstr "" #: notebook/templates/notebook.html:109 msgid "Notebook (.ipynb)" msgstr "" #: notebook/templates/notebook.html:110 msgid "Script" msgstr "" #: notebook/templates/notebook.html:111 msgid "HTML (.html)" msgstr "" #: notebook/templates/notebook.html:112 msgid "Markdown (.md)" msgstr "" #: notebook/templates/notebook.html:113 msgid "reST (.rst)" msgstr "" #: notebook/templates/notebook.html:114 msgid "LaTeX (.tex)" msgstr "" #: notebook/templates/notebook.html:115 msgid "PDF via LaTeX (.pdf)" msgstr "" #: notebook/templates/notebook.html:118 msgid "Deploy as" msgstr "" #: notebook/templates/notebook.html:123 msgid "Trust the output of this notebook" msgstr "" #: notebook/templates/notebook.html:124 msgid "Trust Notebook" msgstr "" #: notebook/templates/notebook.html:127 msgid "Shutdown this notebook's kernel, and close this window" msgstr "" #: notebook/templates/notebook.html:128 msgid "Close and Halt" msgstr "" #: notebook/templates/notebook.html:133 msgid "Cut Cells" msgstr "" #: notebook/templates/notebook.html:134 msgid "Copy Cells" msgstr "" #: notebook/templates/notebook.html:135 msgid "Paste Cells Above" msgstr "" #: notebook/templates/notebook.html:136 msgid "Paste Cells Below" msgstr "" #: notebook/templates/notebook.html:137 msgid "Paste Cells & Replace" msgstr "" #: notebook/templates/notebook.html:138 msgid "Delete Cells" msgstr "" #: notebook/templates/notebook.html:139 msgid "Undo Delete Cells" msgstr "" #: notebook/templates/notebook.html:141 msgid "Split Cell" msgstr "" #: notebook/templates/notebook.html:142 msgid "Merge Cell Above" msgstr "" #: notebook/templates/notebook.html:143 msgid "Merge Cell Below" msgstr "" #: notebook/templates/notebook.html:145 msgid "Move Cell Up" msgstr "" #: notebook/templates/notebook.html:146 msgid "Move Cell Down" msgstr "" #: notebook/templates/notebook.html:148 msgid "Edit Notebook Metadata" msgstr "" #: notebook/templates/notebook.html:150 msgid "Find and Replace" msgstr "" #: notebook/templates/notebook.html:152 msgid "Cut Cell Attachments" msgstr "" #: notebook/templates/notebook.html:153 msgid "Copy Cell Attachments" msgstr "" #: notebook/templates/notebook.html:154 msgid "Paste Cell Attachments" msgstr "" #: notebook/templates/notebook.html:156 msgid "Insert Image" msgstr "" #: notebook/templates/notebook.html:166 msgid "Show/Hide the action icons (below menu bar)" msgstr "" #: notebook/templates/notebook.html:167 msgid "Toggle Toolbar" msgstr "" #: notebook/templates/notebook.html:170 msgid "Show/Hide line numbers in cells" msgstr "" #: notebook/templates/notebook.html:174 msgid "Cell Toolbar" msgstr "" #: notebook/templates/notebook.html:179 msgid "Insert" msgstr "" #: notebook/templates/notebook.html:182 msgid "Insert an empty Code cell above the currently active cell" msgstr "" #: notebook/templates/notebook.html:183 msgid "Insert Cell Above" msgstr "" #: notebook/templates/notebook.html:185 msgid "Insert an empty Code cell below the currently active cell" msgstr "" #: notebook/templates/notebook.html:186 msgid "Insert Cell Below" msgstr "" #: notebook/templates/notebook.html:189 msgid "Cell" msgstr "" #: notebook/templates/notebook.html:191 msgid "Run this cell, and move cursor to the next one" msgstr "" #: 
notebook/templates/notebook.html:192 msgid "Run Cells" msgstr "" #: notebook/templates/notebook.html:193 msgid "Run this cell, select below" msgstr "" #: notebook/templates/notebook.html:194 msgid "Run Cells and Select Below" msgstr "" #: notebook/templates/notebook.html:195 msgid "Run this cell, insert below" msgstr "" #: notebook/templates/notebook.html:196 msgid "Run Cells and Insert Below" msgstr "" #: notebook/templates/notebook.html:197 msgid "Run all cells in the notebook" msgstr "" #: notebook/templates/notebook.html:198 msgid "Run All" msgstr "" #: notebook/templates/notebook.html:199 msgid "Run all cells above (but not including) this cell" msgstr "" #: notebook/templates/notebook.html:200 msgid "Run All Above" msgstr "" #: notebook/templates/notebook.html:201 msgid "Run this cell and all cells below it" msgstr "" #: notebook/templates/notebook.html:202 msgid "Run All Below" msgstr "" #: notebook/templates/notebook.html:205 msgid "All cells in the notebook have a cell type. By default, new cells are created as 'Code' cells" msgstr "" #: notebook/templates/notebook.html:206 msgid "Cell Type" msgstr "" #: notebook/templates/notebook.html:209 msgid "Contents will be sent to the kernel for execution, and output will display in the footer of cell" msgstr "" #: notebook/templates/notebook.html:212 msgid "Contents will be rendered as HTML and serve as explanatory text" msgstr "" #: notebook/templates/notebook.html:213 notebook/templates/notebook.html:298 msgid "Markdown" msgstr "" #: notebook/templates/notebook.html:215 msgid "Contents will pass through nbconvert unmodified" msgstr "" #: notebook/templates/notebook.html:216 msgid "Raw NBConvert" msgstr "" #: notebook/templates/notebook.html:220 msgid "Current Outputs" msgstr "" #: notebook/templates/notebook.html:223 msgid "Hide/Show the output of the current cell" msgstr "" #: notebook/templates/notebook.html:224 notebook/templates/notebook.html:240 msgid "Toggle" msgstr "" #: notebook/templates/notebook.html:227 msgid "Scroll the output of the current cell" msgstr "" #: notebook/templates/notebook.html:228 notebook/templates/notebook.html:244 msgid "Toggle Scrolling" msgstr "" #: notebook/templates/notebook.html:231 msgid "Clear the output of the current cell" msgstr "" #: notebook/templates/notebook.html:232 notebook/templates/notebook.html:248 msgid "Clear" msgstr "" #: notebook/templates/notebook.html:236 msgid "All Output" msgstr "" #: notebook/templates/notebook.html:239 msgid "Hide/Show the output of all cells" msgstr "" #: notebook/templates/notebook.html:243 msgid "Scroll the output of all cells" msgstr "" #: notebook/templates/notebook.html:247 msgid "Clear the output of all cells" msgstr "" #: notebook/templates/notebook.html:257 msgid "Send Keyboard Interrupt (CTRL-C) to the Kernel" msgstr "" #: notebook/templates/notebook.html:258 msgid "Interrupt" msgstr "" #: notebook/templates/notebook.html:261 msgid "Restart the Kernel" msgstr "" #: notebook/templates/notebook.html:262 msgid "Restart" msgstr "" #: notebook/templates/notebook.html:265 msgid "Restart the Kernel and clear all output" msgstr "" #: notebook/templates/notebook.html:266 msgid "Restart & Clear Output" msgstr "" #: notebook/templates/notebook.html:269 msgid "Restart the Kernel and re-run the notebook" msgstr "" #: notebook/templates/notebook.html:270 msgid "Restart & Run All" msgstr "" #: notebook/templates/notebook.html:273 msgid "Reconnect to the Kernel" msgstr "" #: notebook/templates/notebook.html:274 msgid "Reconnect" msgstr "" #: 
notebook/templates/notebook.html:282 msgid "Change kernel" msgstr "" #: notebook/templates/notebook.html:287 msgid "Help" msgstr "" #: notebook/templates/notebook.html:290 msgid "A quick tour of the notebook user interface" msgstr "" #: notebook/templates/notebook.html:290 msgid "User Interface Tour" msgstr "" #: notebook/templates/notebook.html:291 msgid "Opens a tooltip with all keyboard shortcuts" msgstr "" #: notebook/templates/notebook.html:291 msgid "Keyboard Shortcuts" msgstr "" #: notebook/templates/notebook.html:292 msgid "Opens a dialog allowing you to edit Keyboard shortcuts" msgstr "" #: notebook/templates/notebook.html:292 msgid "Edit Keyboard Shortcuts" msgstr "" #: notebook/templates/notebook.html:297 msgid "Notebook Help" msgstr "" #: notebook/templates/notebook.html:303 msgid "Opens in a new window" msgstr "" #: notebook/templates/notebook.html:319 msgid "About Jupyter Notebook" msgstr "" #: notebook/templates/notebook.html:319 msgid "About" msgstr "" #: notebook/templates/page.html:114 msgid "Jupyter Notebook requires JavaScript." msgstr "" #: notebook/templates/page.html:115 msgid "Please enable it to proceed. " msgstr "" #: notebook/templates/page.html:121 msgid "dashboard" msgstr "" #: notebook/templates/page.html:132 msgid "Logout" msgstr "" #: notebook/templates/page.html:134 msgid "Login" msgstr "" #: notebook/templates/tree.html:23 msgid "Files" msgstr "" #: notebook/templates/tree.html:24 msgid "Running" msgstr "" #: notebook/templates/tree.html:25 msgid "Clusters" msgstr "" #: notebook/templates/tree.html:32 msgid "Select items to perform actions on them." msgstr "" #: notebook/templates/tree.html:35 msgid "Duplicate selected" msgstr "" #: notebook/templates/tree.html:35 msgid "Duplicate" msgstr "" #: notebook/templates/tree.html:36 msgid "Rename selected" msgstr "" #: notebook/templates/tree.html:37 msgid "Move selected" msgstr "" #: notebook/templates/tree.html:37 msgid "Move" msgstr "" #: notebook/templates/tree.html:38 msgid "Download selected" msgstr "" #: notebook/templates/tree.html:39 msgid "Shutdown selected notebook(s)" msgstr "" #: notebook/templates/notebook.html:278 #: notebook/templates/tree.html:39 msgid "Shutdown" msgstr "" #: notebook/templates/tree.html:40 msgid "View selected" msgstr "" #: notebook/templates/tree.html:41 msgid "Edit selected" msgstr "" #: notebook/templates/tree.html:42 msgid "Delete selected" msgstr "" #: notebook/templates/tree.html:50 msgid "Click to browse for a file to upload." msgstr "" #: notebook/templates/tree.html:51 msgid "Upload" msgstr "" #: notebook/templates/tree.html:65 msgid "Text File" msgstr "" #: notebook/templates/tree.html:68 msgid "Folder" msgstr "" #: notebook/templates/tree.html:72 msgid "Terminal" msgstr "" #: notebook/templates/tree.html:76 msgid "Terminals Unavailable" msgstr "" #: notebook/templates/tree.html:82 msgid "Refresh notebook list" msgstr "" #: notebook/templates/tree.html:90 msgid "Select All / None" msgstr "" #: notebook/templates/tree.html:93 msgid "Select..." 
msgstr "" #: notebook/templates/tree.html:98 msgid "Select All Folders" msgstr "" #: notebook/templates/tree.html:98 msgid " Folders" msgstr "" #: notebook/templates/tree.html:99 msgid "Select All Notebooks" msgstr "" #: notebook/templates/tree.html:99 msgid " All Notebooks" msgstr "" #: notebook/templates/tree.html:100 msgid "Select Running Notebooks" msgstr "" #: notebook/templates/tree.html:100 msgid " Running" msgstr "" #: notebook/templates/tree.html:101 msgid "Select All Files" msgstr "" #: notebook/templates/tree.html:101 msgid " Files" msgstr "" #: notebook/templates/tree.html:114 msgid "Last Modified" msgstr "" #: notebook/templates/tree.html:120 msgid "Name" msgstr "" #: notebook/templates/tree.html:130 msgid "Currently running Jupyter processes" msgstr "" #: notebook/templates/tree.html:134 msgid "Refresh running list" msgstr "" #: notebook/templates/tree.html:150 msgid "There are no terminals running." msgstr "" #: notebook/templates/tree.html:152 msgid "Terminals are unavailable." msgstr "" #: notebook/templates/tree.html:162 msgid "Notebooks" msgstr "" #: notebook/templates/tree.html:169 msgid "There are no notebooks running." msgstr "" #: notebook/templates/tree.html:178 msgid "Clusters tab is now provided by IPython parallel." msgstr "" #: notebook/templates/tree.html:179 msgid "See 'IPython parallel' for installation details." msgstr "" jupyter_server-1.13.1/jupyter_server/i18n/notebook.pot000066400000000000000000000266771415445537200231340ustar00rootroot00000000000000# Translations template for Jupyter. # Copyright (C) 2017 ORGANIZATION # This file is distributed under the same license as the Jupyter project. # FIRST AUTHOR , 2017. # #, fuzzy msgid "" msgstr "" "Project-Id-Version: Jupyter VERSION\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" "POT-Creation-Date: 2017-07-08 21:52-0500\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=utf-8\n" "Content-Transfer-Encoding: 8bit\n" "Generated-By: Babel 2.3.4\n" #: jupyter_server/serverapp.py:53 msgid "The Jupyter Server requires tornado >= 4.0" msgstr "" #: jupyter_server/serverapp.py:57 msgid "The Jupyter Server requires tornado >= 4.0, but you have < 1.1.0" msgstr "" #: jupyter_server/serverapp.py:59 #, python-format msgid "The Jupyter Server requires tornado >= 4.0, but you have %s" msgstr "" #: jupyter_server/serverapp.py:389 msgid "List currently running Jupyter servers." msgstr "" #: jupyter_server/serverapp.py:393 msgid "Produce machine-readable JSON output." msgstr "" #: jupyter_server/serverapp.py:397 msgid "If True, each line of output will be a JSON object with the details from the server info file." msgstr "" #: jupyter_server/serverapp.py:402 msgid "Currently running servers:" msgstr "" #: jupyter_server/serverapp.py:419 msgid "Don't open the jupyter_server in a browser after startup." msgstr "" #: jupyter_server/serverapp.py:423 msgid "DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib." msgstr "" #: jupyter_server/serverapp.py:439 msgid "Allow the server to be run from root user." msgstr "" #: jupyter_server/serverapp.py:470 msgid "" "The Jupyter Server.\n" " \n" " This launches a Tornado-based Jupyter Server." msgstr "" #: jupyter_server/serverapp.py:540 msgid "Set the Access-Control-Allow-Credentials: true header" msgstr "" #: jupyter_server/serverapp.py:544 msgid "Whether to allow the user to run the Jupyter server as root." 
msgstr "" #: jupyter_server/serverapp.py:548 msgid "The default URL to redirect to from `/`" msgstr "" #: jupyter_server/serverapp.py:552 msgid "The IP address the Jupyter server will listen on." msgstr "" #: jupyter_server/serverapp.py:565 #, python-format msgid "" "Cannot bind to localhost, using 127.0.0.1 as default ip\n" "%s" msgstr "" #: jupyter_server/serverapp.py:579 msgid "The port the Jupyter server will listen on." msgstr "" #: jupyter_server/serverapp.py:583 msgid "The number of additional ports to try if the specified port is not available." msgstr "" #: jupyter_server/serverapp.py:587 msgid "The full path to an SSL/TLS certificate file." msgstr "" #: jupyter_server/serverapp.py:591 msgid "The full path to a private key file for usage with SSL/TLS." msgstr "" #: jupyter_server/serverapp.py:595 msgid "The full path to a certificate authority certificate for SSL/TLS client authentication." msgstr "" #: jupyter_server/serverapp.py:599 msgid "The file where the cookie secret is stored." msgstr "" #: jupyter_server/serverapp.py:628 #, python-format msgid "Writing Jupyter server cookie secret to %s" msgstr "" #: jupyter_server/serverapp.py:635 #, python-format msgid "Could not set permissions on %s" msgstr "" #: jupyter_server/serverapp.py:640 msgid "" "Token used for authenticating first-time connections to the server.\n" "\n" " When no password is enabled,\n" " the default is to generate a new, random token.\n" "\n" " Setting to an empty string disables authentication altogether, which is NOT RECOMMENDED.\n" " " msgstr "" #: jupyter_server/serverapp.py:650 msgid "" "One-time token used for opening a browser.\n" " Once used, this token cannot be used again.\n" " " msgstr "" #: jupyter_server/serverapp.py:726 msgid "" "Specify Where to open the server on startup. This is the\n" " `new` argument passed to the standard library method `webbrowser.open`.\n" " The behaviour is not guaranteed, but depends on browser support. Valid\n" " values are:\n" " 2 opens a new tab,\n" " 1 opens a new window,\n" " 0 opens in an existing window.\n" " See the `webbrowser.open` documentation for details.\n" " " msgstr "" #: jupyter_server/serverapp.py:742 msgid "" "\n" " webapp_settings is deprecated, use tornado_settings.\n" msgstr "" #: jupyter_server/serverapp.py:746 msgid "Supply overrides for the tornado.web.Application that the Jupyter server uses." msgstr "" #: jupyter_server/serverapp.py:750 msgid "" "\n" " Set the tornado compression options for websocket connections.\n" "\n" " This value will be returned from :meth:`WebSocketHandler.get_compression_options`.\n" " None (default) will disable compression.\n" " A dict (even an empty one) will enable compression.\n" "\n" " See the tornado docs for WebSocketHandler.get_compression_options for details.\n" " " msgstr "" #: jupyter_server/serverapp.py:761 msgid "Supply overrides for terminado. Currently only supports \"shell_command\"." msgstr "" #: jupyter_server/serverapp.py:764 msgid "Extra keyword arguments to pass to `set_secure_cookie`. See tornado's set_secure_cookie docs for details." msgstr "" #: jupyter_server/serverapp.py:768 msgid "" "Supply SSL options for the tornado HTTPServer.\n" " See the tornado docs for details." msgstr "" #: jupyter_server/serverapp.py:772 msgid "Supply extra arguments that will be passed to Jinja environment." msgstr "" #: jupyter_server/serverapp.py:776 msgid "Extra variables to supply to jinja templates when rendering." 
msgstr "" #: jupyter_server/serverapp.py:816 msgid "base_project_url is deprecated, use base_url" msgstr "" #: jupyter_server/serverapp.py:832 msgid "Path to search for custom.js, css" msgstr "" #: jupyter_server/serverapp.py:844 msgid "" "Extra paths to search for serving jinja templates.\n" "\n" " Can be used to override templates from jupyter_server.templates." msgstr "" #: jupyter_server/serverapp.py:900 #, python-format msgid "Using MathJax: %s" msgstr "" #: jupyter_server/serverapp.py:903 msgid "The MathJax.js configuration file that is to be used." msgstr "" #: jupyter_server/serverapp.py:908 #, python-format msgid "Using MathJax configuration file: %s" msgstr "" #: jupyter_server/serverapp.py:920 msgid "The kernel manager class to use." msgstr "" #: jupyter_server/serverapp.py:926 msgid "The session manager class to use." msgstr "" #: jupyter_server/serverapp.py:932 msgid "The config manager class to use" msgstr "" #: jupyter_server/serverapp.py:953 msgid "The login handler class to use." msgstr "" #: jupyter_server/serverapp.py:960 msgid "The logout handler class to use." msgstr "" #: jupyter_server/serverapp.py:964 msgid "Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headerssent by the upstream reverse proxy. Necessary if the proxy handles SSL" msgstr "" #: jupyter_server/serverapp.py:976 msgid "" "\n" " DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib.\n" " " msgstr "" #: jupyter_server/serverapp.py:988 msgid "Support for specifying --pylab on the command line has been removed." msgstr "" #: jupyter_server/serverapp.py:990 msgid "Please use `%pylab{0}` or `%matplotlib{0}` in the notebook itself." msgstr "" #: jupyter_server/serverapp.py:995 msgid "The directory to use for notebooks and kernels." msgstr "" #: jupyter_server/serverapp.py:1018 #, python-format msgid "No such notebook dir: '%r'" msgstr "" #: jupyter_server/serverapp.py:1036 msgid "server_extensions is deprecated, use jpserver_extensions" msgstr "" #: jupyter_server/serverapp.py:1040 msgid "Dict of Python modules to load as notebook server extensions. Entry values can be used to enable and disable the loading ofthe extensions. The extensions will be loaded in alphabetical order." msgstr "" #: jupyter_server/serverapp.py:1049 msgid "Reraise exceptions encountered loading server extensions?" msgstr "" #: jupyter_server/serverapp.py:1052 msgid "" "(msgs/sec)\n" " Maximum rate at which messages can be sent on iopub before they are\n" " limited." msgstr "" #: jupyter_server/serverapp.py:1056 msgid "" "(bytes/sec)\n" " Maximum rate at which stream output can be sent on iopub before they are\n" " limited." msgstr "" #: jupyter_server/serverapp.py:1060 msgid "" "(sec) Time window used to \n" " check the message and data rate limits." msgstr "" #: jupyter_server/serverapp.py:1071 #, python-format msgid "No such file or directory: %s" msgstr "" #: jupyter_server/serverapp.py:1141 msgid "Notebook servers are configured to only be run with a password." msgstr "" #: jupyter_server/serverapp.py:1142 msgid "Hint: run the following command to set a password" msgstr "" #: jupyter_server/serverapp.py:1143 msgid "\t$ python -m jupyter_server.auth password" msgstr "" #: jupyter_server/serverapp.py:1181 #, python-format msgid "The port %i is already in use, trying another port." 
msgstr "" #: jupyter_server/serverapp.py:1184 #, python-format msgid "Permission to listen on port %i denied" msgstr "" #: jupyter_server/serverapp.py:1193 msgid "ERROR: the Jupyter server could not be started because no available port could be found." msgstr "" #: jupyter_server/serverapp.py:1199 msgid "[all ip addresses on your system]" msgstr "" #: jupyter_server/serverapp.py:1223 #, python-format msgid "Terminals not available (error was %s)" msgstr "" #: jupyter_server/serverapp.py:1259 msgid "interrupted" msgstr "" #: jupyter_server/serverapp.py:1261 msgid "y" msgstr "" #: jupyter_server/serverapp.py:1262 msgid "n" msgstr "" #: jupyter_server/serverapp.py:1263 #, python-format msgid "Shutdown this notebook server (%s/[%s])? " msgstr "" #: jupyter_server/serverapp.py:1269 msgid "Shutdown confirmed" msgstr "" #: jupyter_server/serverapp.py:1273 msgid "No answer for 5s:" msgstr "" #: jupyter_server/serverapp.py:1274 msgid "resuming operation..." msgstr "" #: jupyter_server/serverapp.py:1282 #, python-format msgid "received signal %s, stopping" msgstr "" #: jupyter_server/serverapp.py:1338 #, python-format msgid "Error loading server extension %s" msgstr "" #: jupyter_server/serverapp.py:1369 #, python-format msgid "Shutting down %d kernels" msgstr "" #: jupyter_server/serverapp.py:1375 #, python-format msgid "%d active kernel" msgid_plural "%d active kernels" msgstr[0] "" msgstr[1] "" #: jupyter_server/serverapp.py:1379 #, python-format msgid "" "The Jupyter Notebook is running at:\n" "\r" "%s" msgstr "" #: jupyter_server/serverapp.py:1426 msgid "Running as root is not recommended. Use --allow-root to bypass." msgstr "" #: jupyter_server/serverapp.py:1432 msgid "Use Control-C to stop this server and shut down all kernels (twice to skip confirmation)." msgstr "" #: jupyter_server/serverapp.py:1434 msgid "Welcome to Project Jupyter! Explore the various tools available and their corresponding documentation. If you are interested in contributing to the platform, please visit the communityresources section at http://jupyter.org/community.html." msgstr "" #: jupyter_server/serverapp.py:1445 #, python-format msgid "No web browser found: %s." msgstr "" #: jupyter_server/serverapp.py:1450 #, python-format msgid "%s does not exist" msgstr "" #: jupyter_server/serverapp.py:1484 msgid "Interrupted..." msgstr "" #: jupyter_server/services/contents/filemanager.py:506 #, python-format msgid "Serving notebooks from local directory: %s" msgstr "" #: jupyter_server/services/contents/manager.py:68 msgid "Untitled" msgstr "" jupyter_server-1.13.1/jupyter_server/i18n/zh_CN/000077500000000000000000000000001415445537200215475ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/i18n/zh_CN/LC_MESSAGES/000077500000000000000000000000001415445537200233345ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/i18n/zh_CN/LC_MESSAGES/nbui.po000066400000000000000000000440331415445537200246350ustar00rootroot00000000000000# Translations template for Jupyter. # Copyright (C) 2017 ORGANIZATION # This file is distributed under the same license as the Jupyter project. # FIRST AUTHOR , 2017. 
# #, fuzzy msgid "" msgstr "" "Project-Id-Version: Jupyter VERSION\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" "POT-Creation-Date: 2017-08-25 02:53-0400\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=utf-8\n" "Content-Transfer-Encoding: 8bit\n" "Generated-By: Babel 2.5.0\n" #: notebook/templates/404.html:3 msgid "You are requesting a page that does not exist!" msgstr "请求的代码不存在!" #: notebook/templates/edit.html:37 msgid "current mode" msgstr "当前模式" #: notebook/templates/edit.html:48 notebook/templates/notebook.html:78 msgid "File" msgstr "文件" #: notebook/templates/edit.html:50 notebook/templates/tree.html:57 msgid "New" msgstr "新建" #: notebook/templates/edit.html:51 msgid "Save" msgstr "保存" #: notebook/templates/edit.html:52 notebook/templates/tree.html:36 msgid "Rename" msgstr "重命名" #: notebook/templates/edit.html:53 notebook/templates/tree.html:38 msgid "Download" msgstr "下载" #: notebook/templates/edit.html:56 notebook/templates/notebook.html:131 #: notebook/templates/tree.html:41 msgid "Edit" msgstr "编辑" #: notebook/templates/edit.html:58 msgid "Find" msgstr "查找" #: notebook/templates/edit.html:59 msgid "Find & Replace" msgstr "查找 & 替换" #: notebook/templates/edit.html:61 msgid "Key Map" msgstr "键值对" #: notebook/templates/edit.html:62 msgid "Default" msgstr "默认" #: notebook/templates/edit.html:63 msgid "Sublime Text" msgstr "代码编辑器" #: notebook/templates/edit.html:68 notebook/templates/notebook.html:159 #: notebook/templates/tree.html:40 msgid "View" msgstr "查看" #: notebook/templates/edit.html:70 notebook/templates/notebook.html:162 msgid "Show/Hide the logo and notebook title (above menu bar)" msgstr "显示/隐藏 标题和logo" #: notebook/templates/edit.html:71 notebook/templates/notebook.html:163 msgid "Toggle Header" msgstr "切换Header" #: notebook/templates/edit.html:72 notebook/templates/notebook.html:171 msgid "Toggle Line Numbers" msgstr "切换行号" #: notebook/templates/edit.html:75 msgid "Language" msgstr "语言" #: notebook/templates/error.html:23 msgid "The error was:" msgstr "错误:" #: notebook/templates/login.html:24 msgid "Password or token:" msgstr "密码或者token:" #: notebook/templates/login.html:26 msgid "Password:" msgstr "密码:" #: notebook/templates/login.html:31 msgid "Log in" msgstr "登录" #: notebook/templates/login.html:39 msgid "No login available, you shouldn't be seeing this page." msgstr "还没有登录, 请先登录." #: notebook/templates/logout.html:31 #, python-format msgid "Proceed to the dashboard" msgstr "进入 指示板" #: notebook/templates/logout.html:33 #, python-format msgid "Proceed to the login page" msgstr "进入 登录页面" #: notebook/templates/notebook.html:62 msgid "Menu" msgstr "菜单" #: notebook/templates/notebook.html:65 notebook/templates/notebook.html:254 msgid "Kernel" msgstr "服务" #: notebook/templates/notebook.html:68 msgid "This notebook is read-only" msgstr "这个代码是只读的" #: notebook/templates/notebook.html:81 msgid "New Notebook" msgstr "新建代码" #: notebook/templates/notebook.html:85 msgid "Opens a new window with the Dashboard view" msgstr "以仪表盘视角打开新的窗口" #: notebook/templates/notebook.html:86 msgid "Open..." msgstr "打开..." #: notebook/templates/notebook.html:90 msgid "Open a copy of this notebook's contents and start a new kernel" msgstr "打开代码内容的副本并启动一个新的服务" #: notebook/templates/notebook.html:91 msgid "Make a Copy..." msgstr "复制..." #: notebook/templates/notebook.html:92 msgid "Rename..." msgstr "重命名..." 
#: notebook/templates/notebook.html:93 msgid "Save and Checkpoint" msgstr "保存" #: notebook/templates/notebook.html:96 msgid "Revert to Checkpoint" msgstr "恢复" #: notebook/templates/notebook.html:106 msgid "Print Preview" msgstr "打印预览" #: notebook/templates/notebook.html:107 msgid "Download as" msgstr "下载" #: notebook/templates/notebook.html:109 msgid "Notebook (.ipynb)" msgstr "代码(.ipynb)" #: notebook/templates/notebook.html:110 msgid "Script" msgstr "脚本" #: notebook/templates/notebook.html:111 msgid "HTML (.html)" msgstr "" #: notebook/templates/notebook.html:112 msgid "Markdown (.md)" msgstr "" #: notebook/templates/notebook.html:113 msgid "reST (.rst)" msgstr "" #: notebook/templates/notebook.html:114 msgid "LaTeX (.tex)" msgstr "" #: notebook/templates/notebook.html:115 msgid "PDF via LaTeX (.pdf)" msgstr "" #: notebook/templates/notebook.html:118 msgid "Deploy as" msgstr "部署在" #: notebook/templates/notebook.html:123 msgid "Trust the output of this notebook" msgstr "信任代码的输出" #: notebook/templates/notebook.html:124 msgid "Trust Notebook" msgstr "信任代码" #: notebook/templates/notebook.html:127 msgid "Shutdown this notebook's kernel, and close this window" msgstr "关闭代码服务并关闭窗口" #: notebook/templates/notebook.html:128 msgid "Close and Halt" msgstr "关闭" #: notebook/templates/notebook.html:133 msgid "Cut Cells" msgstr "剪切代码块" #: notebook/templates/notebook.html:134 msgid "Copy Cells" msgstr "复制代码块" #: notebook/templates/notebook.html:135 msgid "Paste Cells Above" msgstr "粘贴到上面" #: notebook/templates/notebook.html:136 msgid "Paste Cells Below" msgstr "粘贴到下面" #: notebook/templates/notebook.html:137 msgid "Paste Cells & Replace" msgstr "粘贴代码块 & 替换" #: notebook/templates/notebook.html:138 msgid "Delete Cells" msgstr "删除代码块" #: notebook/templates/notebook.html:139 msgid "Undo Delete Cells" msgstr "撤销删除" #: notebook/templates/notebook.html:141 msgid "Split Cell" msgstr "分割代码块" #: notebook/templates/notebook.html:142 msgid "Merge Cell Above" msgstr "合并上面的代码块" #: notebook/templates/notebook.html:143 msgid "Merge Cell Below" msgstr "合并下面的代码块" #: notebook/templates/notebook.html:145 msgid "Move Cell Up" msgstr "上移代码块" #: notebook/templates/notebook.html:146 msgid "Move Cell Down" msgstr "下移代码块" #: notebook/templates/notebook.html:148 msgid "Edit Notebook Metadata" msgstr "编辑界面元数据" #: notebook/templates/notebook.html:150 msgid "Find and Replace" msgstr "查找并替换" #: notebook/templates/notebook.html:152 msgid "Cut Cell Attachments" msgstr "剪切附件" #: notebook/templates/notebook.html:153 msgid "Copy Cell Attachments" msgstr "复制附件" #: notebook/templates/notebook.html:154 msgid "Paste Cell Attachments" msgstr "粘贴附件" #: notebook/templates/notebook.html:156 msgid "Insert Image" msgstr "插入图片" #: notebook/templates/notebook.html:166 msgid "Show/Hide the action icons (below menu bar)" msgstr "显示/隐藏 操作图标" #: notebook/templates/notebook.html:167 msgid "Toggle Toolbar" msgstr "" #: notebook/templates/notebook.html:170 msgid "Show/Hide line numbers in cells" msgstr "显示/隐藏行号" #: notebook/templates/notebook.html:174 msgid "Cell Toolbar" msgstr "单元格工具栏" #: notebook/templates/notebook.html:179 msgid "Insert" msgstr "插入" #: notebook/templates/notebook.html:182 msgid "Insert an empty Code cell above the currently active cell" msgstr "在当前活动单元上插入一个空的代码单元格" #: notebook/templates/notebook.html:183 msgid "Insert Cell Above" msgstr "插入单元格上面" #: notebook/templates/notebook.html:185 msgid "Insert an empty Code cell below the currently active cell" msgstr "在当前活动单元下面插入一个空的代码单元格" #: notebook/templates/notebook.html:186 msgid "Insert Cell 
Below" msgstr "插入单元格下面" #: notebook/templates/notebook.html:189 msgid "Cell" msgstr "单元格" #: notebook/templates/notebook.html:191 msgid "Run this cell, and move cursor to the next one" msgstr "运行这个单元格,并将光标移到下一个" #: notebook/templates/notebook.html:192 msgid "Run Cells" msgstr "运行所有单元格" #: notebook/templates/notebook.html:193 msgid "Run this cell, select below" msgstr "运行此单元,选择以下选项" #: notebook/templates/notebook.html:194 msgid "Run Cells and Select Below" msgstr "运行单元格并自动选择下一个" #: notebook/templates/notebook.html:195 msgid "Run this cell, insert below" msgstr "运行单元格并选择以下" #: notebook/templates/notebook.html:196 msgid "Run Cells and Insert Below" msgstr "运行单元格并在下面插入" #: notebook/templates/notebook.html:197 msgid "Run all cells in the notebook" msgstr "运行所有的单元格" #: notebook/templates/notebook.html:198 msgid "Run All" msgstr "运行所有" #: notebook/templates/notebook.html:199 msgid "Run all cells above (but not including) this cell" msgstr "运行上面的所有单元(但不包括)这个单元格" #: notebook/templates/notebook.html:200 msgid "Run All Above" msgstr "运行上面的代码块" #: notebook/templates/notebook.html:201 msgid "Run this cell and all cells below it" msgstr "运行当前及以下代码块" #: notebook/templates/notebook.html:202 msgid "Run All Below" msgstr "运行下面的代码块" #: notebook/templates/notebook.html:205 msgid "All cells in the notebook have a cell type. By default, new cells are created as 'Code' cells" msgstr "代码里的所有单元格都有一个类型. 默认情况下, 新单元被创建为'Code'单元格" #: notebook/templates/notebook.html:206 msgid "Cell Type" msgstr "单元格类型" #: notebook/templates/notebook.html:209 msgid "Contents will be sent to the kernel for execution, and output will display in the footer of cell" msgstr "内容将被发送到内核以执行, 输出将显示在单元格的页脚." #: notebook/templates/notebook.html:212 msgid "Contents will be rendered as HTML and serve as explanatory text" msgstr "内容将以HTML形式呈现, 并作为解释性文本" #: notebook/templates/notebook.html:213 notebook/templates/notebook.html:298 msgid "Markdown" msgstr "标签" #: notebook/templates/notebook.html:215 msgid "Contents will pass through nbconvert unmodified" msgstr "内容将通过未经修改的nbconvert" #: notebook/templates/notebook.html:216 msgid "Raw NBConvert" msgstr "原生 NBConvert" #: notebook/templates/notebook.html:220 msgid "Current Outputs" msgstr "当前输出" #: notebook/templates/notebook.html:223 msgid "Hide/Show the output of the current cell" msgstr "隐藏/显示当前单元格输出" #: notebook/templates/notebook.html:224 notebook/templates/notebook.html:240 msgid "Toggle" msgstr "切换" #: notebook/templates/notebook.html:227 msgid "Scroll the output of the current cell" msgstr "滚动当前单元格的输出" #: notebook/templates/notebook.html:228 notebook/templates/notebook.html:244 msgid "Toggle Scrolling" msgstr "切换滚动" #: notebook/templates/notebook.html:231 msgid "Clear the output of the current cell" msgstr "清除当前单元格的输出" #: notebook/templates/notebook.html:232 notebook/templates/notebook.html:248 msgid "Clear" msgstr "清空" #: notebook/templates/notebook.html:236 msgid "All Output" msgstr "所有输出" #: notebook/templates/notebook.html:239 msgid "Hide/Show the output of all cells" msgstr "隐藏/显示 所有代码块的输出" #: notebook/templates/notebook.html:243 msgid "Scroll the output of all cells" msgstr "滚动所有单元格的输出" #: notebook/templates/notebook.html:247 msgid "Clear the output of all cells" msgstr "清空所有代码块的输出" #: notebook/templates/notebook.html:257 msgid "Send Keyboard Interrupt (CTRL-C) to the Kernel" msgstr "按下CTRL-C 中断服务" #: notebook/templates/notebook.html:258 msgid "Interrupt" msgstr "中断" #: notebook/templates/notebook.html:261 msgid "Restart the Kernel" msgstr "重启服务" #: notebook/templates/notebook.html:262 msgid 
"Restart" msgstr "重启" #: notebook/templates/notebook.html:265 msgid "Restart the Kernel and clear all output" msgstr "重启服务并清空所有输出" #: notebook/templates/notebook.html:266 msgid "Restart & Clear Output" msgstr "重启 & 清空输出" #: notebook/templates/notebook.html:269 msgid "Restart the Kernel and re-run the notebook" msgstr "重启服务并且重新运行代码" #: notebook/templates/notebook.html:270 msgid "Restart & Run All" msgstr "重启 & 运行所有" #: notebook/templates/notebook.html:273 msgid "Reconnect to the Kernel" msgstr "重新连接服务" #: notebook/templates/notebook.html:274 msgid "Reconnect" msgstr "重连" #: notebook/templates/notebook.html:282 msgid "Change kernel" msgstr "改变服务" #: notebook/templates/notebook.html:287 msgid "Help" msgstr "帮助" #: notebook/templates/notebook.html:290 msgid "A quick tour of the notebook user interface" msgstr "快速浏览一下notebook用户界面" #: notebook/templates/notebook.html:290 msgid "User Interface Tour" msgstr "用户界面之旅" #: notebook/templates/notebook.html:291 msgid "Opens a tooltip with all keyboard shortcuts" msgstr "打开所有快捷键提示信息" #: notebook/templates/notebook.html:291 msgid "Keyboard Shortcuts" msgstr "快捷键" #: notebook/templates/notebook.html:292 msgid "Opens a dialog allowing you to edit Keyboard shortcuts" msgstr "打开对话框编辑快捷键" #: notebook/templates/notebook.html:292 msgid "Edit Keyboard Shortcuts" msgstr "编辑快捷键" #: notebook/templates/notebook.html:297 msgid "Notebook Help" msgstr "帮助" #: notebook/templates/notebook.html:303 msgid "Opens in a new window" msgstr "在新窗口打开" #: notebook/templates/notebook.html:319 msgid "About Jupyter Notebook" msgstr "关于本程序" #: notebook/templates/notebook.html:319 msgid "About" msgstr "关于" #: notebook/templates/page.html:114 msgid "Jupyter Notebook requires JavaScript." msgstr "Jupyter Notebook需要的JavaScript." #: notebook/templates/page.html:115 msgid "Please enable it to proceed. " msgstr "请允许它继续." #: notebook/templates/page.html:122 msgid "dashboard" msgstr "指示板" #: notebook/templates/page.html:135 msgid "Logout" msgstr "注销" #: notebook/templates/page.html:137 msgid "Login" msgstr "登录" #: notebook/templates/tree.html:23 msgid "Files" msgstr "文件" #: notebook/templates/tree.html:24 msgid "Running" msgstr "运行" #: notebook/templates/tree.html:25 msgid "Clusters" msgstr "集群" #: notebook/templates/tree.html:32 msgid "Select items to perform actions on them." msgstr "选择操作对象." #: notebook/templates/tree.html:35 msgid "Duplicate selected" msgstr "复制选择的对象" #: notebook/templates/tree.html:35 msgid "Duplicate" msgstr "复制" #: notebook/templates/tree.html:36 msgid "Rename selected" msgstr "重命名" #: notebook/templates/tree.html:37 msgid "Move selected" msgstr "移动" #: notebook/templates/tree.html:37 msgid "Move" msgstr "移动" #: notebook/templates/tree.html:38 msgid "Download selected" msgstr "下载" #: notebook/templates/tree.html:39 msgid "Shutdown selected notebook(s)" msgstr "停止运行选择的notebook(s)" #: notebook/templates/notebook.html:278 #: notebook/templates/tree.html:39 msgid "Shutdown" msgstr "关闭" #: notebook/templates/tree.html:40 msgid "View selected" msgstr "查看" #: notebook/templates/tree.html:41 msgid "Edit selected" msgstr "编辑" #: notebook/templates/tree.html:42 msgid "Delete selected" msgstr "删除" #: notebook/templates/tree.html:50 msgid "Click to browse for a file to upload." 
msgstr "点击浏览文件上传" #: notebook/templates/tree.html:51 msgid "Upload" msgstr "上传" #: notebook/templates/tree.html:65 msgid "Text File" msgstr "文本文件" #: notebook/templates/tree.html:68 msgid "Folder" msgstr "文件夹" #: notebook/templates/tree.html:72 msgid "Terminal" msgstr "终端" #: notebook/templates/tree.html:76 msgid "Terminals Unavailable" msgstr "终端不可用" #: notebook/templates/tree.html:82 msgid "Refresh notebook list" msgstr "刷新笔记列表" #: notebook/templates/tree.html:90 msgid "Select All / None" msgstr "全选 / 全部选" #: notebook/templates/tree.html:93 msgid "Select..." msgstr "选择..." #: notebook/templates/tree.html:98 msgid "Select All Folders" msgstr "选择所有文件夹" #: notebook/templates/tree.html:98 msgid " Folders" msgstr "文件夹" #: notebook/templates/tree.html:99 msgid "Select All Notebooks" msgstr "选择所有笔记" #: notebook/templates/tree.html:99 msgid " All Notebooks" msgstr "所有笔记" #: notebook/templates/tree.html:100 msgid "Select Running Notebooks" msgstr "选择运行中的笔记" #: notebook/templates/tree.html:100 msgid " Running" msgstr "运行" #: notebook/templates/tree.html:101 msgid "Select All Files" msgstr "选择所有文件" #: notebook/templates/tree.html:101 msgid " Files" msgstr "文件" #: notebook/templates/tree.html:114 msgid "Last Modified" msgstr "最后修改" #: notebook/templates/tree.html:120 msgid "Name" msgstr "名字" #: notebook/templates/tree.html:130 msgid "Currently running Jupyter processes" msgstr "当前运行Jupyter" #: notebook/templates/tree.html:134 msgid "Refresh running list" msgstr "刷新运行列表" #: notebook/templates/tree.html:150 msgid "There are no terminals running." msgstr "没有终端运行" #: notebook/templates/tree.html:152 msgid "Terminals are unavailable." msgstr "终端不可用" #: notebook/templates/tree.html:162 msgid "Notebooks" msgstr "笔记" #: notebook/templates/tree.html:169 msgid "There are no notebooks running." msgstr "没有笔记正在运行" #: notebook/templates/tree.html:178 msgid "Clusters tab is now provided by IPython parallel." msgstr "集群标签现在由IPython并行提供." #: notebook/templates/tree.html:179 msgid "See 'IPython parallel' for installation details." msgstr "安装细节查看 'IPython parallel'." jupyter_server-1.13.1/jupyter_server/i18n/zh_CN/LC_MESSAGES/notebook.po000066400000000000000000000337741415445537200255320ustar00rootroot00000000000000# Translations template for Jupyter. # Copyright (C) 2017 ORGANIZATION # This file is distributed under the same license as the Jupyter project. # FIRST AUTHOR , 2017. # #, fuzzy msgid "" msgstr "" "Project-Id-Version: Jupyter VERSION\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" "POT-Creation-Date: 2017-08-25 02:53-0400\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=utf-8\n" "Content-Transfer-Encoding: 8bit\n" "Generated-By: Babel 2.5.0\n" #: notebook/serverapp.py:49 msgid "The Jupyter Notebook requires tornado >= 4.0" msgstr "该程序要求 tornado 版本 >= 4.0" #: notebook/serverapp.py:53 msgid "The Jupyter Notebook requires tornado >= 4.0, but you have < 1.1.0" msgstr "该程序要求 tornado 版本 >= 4.0, 可是现实却是 < 1.1.0" #: notebook/serverapp.py:55 #, python-format msgid "The Jupyter Notebook requires tornado >= 4.0, but you have %s" msgstr "该程序要求 tornado 版本 >= 4.0, 可是现实却是 %s" #: notebook/serverapp.py:206 #, python-format msgid "Alternatively use `%s` when working on the notebook's Javascript and LESS" msgstr "在使用notebook的JavaScript和LESS时,可以替换使用 `%s` " #: notebook/serverapp.py:385 msgid "List currently running notebook servers." msgstr "列出当前运行的Notebook服务." 
#: notebook/serverapp.py:389 msgid "Produce machine-readable JSON list output." msgstr "生成机器可读的JSON列表输出." #: notebook/serverapp.py:391 msgid "Produce machine-readable JSON object on each line of output." msgstr "在输出的每一行生成一个机器可读的JSON对象." #: notebook/serverapp.py:395 msgid "If True, the output will be a JSON list of objects, one per active notebook server, each with the details from the relevant server info file." msgstr "如果为True,输出将是一个JSON对象列表,每个活动的笔记本服务器对应一项,每一项都包含相关服务器信息文件的详细信息。" #: notebook/serverapp.py:399 msgid "If True, each line of output will be a JSON object with the details from the server info file. For a JSON list output, see the NbserverListApp.jsonlist configuration value" msgstr "如果为True,每一行输出将是一个JSON对象,其中有来自服务器信息文件的详细信息。对于JSON列表输出,请参阅NbserverListApp.jsonlist配置值" #: notebook/serverapp.py:425 msgid "Don't open the notebook in a browser after startup." msgstr "在启动服务以后不在浏览器中打开一个窗口." #: notebook/serverapp.py:429 msgid "DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib." msgstr "" #: notebook/serverapp.py:445 msgid "Allow the notebook to be run from root user." msgstr "允许notebook在root用户下运行." #: notebook/serverapp.py:476 msgid "" "The Jupyter HTML Notebook.\n" " \n" " This launches a Tornado based HTML Notebook Server that serves up an HTML5/Javascript Notebook client." msgstr "The Jupyter HTML Notebook.\n \n 这将启动一个基于Tornado的HTML笔记本服务器,它提供一个HTML5/JavaScript笔记本客户端。" #: notebook/serverapp.py:546 msgid "Set the Access-Control-Allow-Credentials: true header" msgstr "设置Access-Control-Allow-Credentials:true报头" #: notebook/serverapp.py:550 msgid "Whether to allow the user to run the notebook as root." msgstr "是否允许notebook在root用户下运行." #: notebook/serverapp.py:554 msgid "The default URL to redirect to from `/`" msgstr "从 `/` 重定向到的默认URL" #: notebook/serverapp.py:558 msgid "The IP address the notebook server will listen on." msgstr "notebook服务会监听的IP地址." #: notebook/serverapp.py:571 #, python-format msgid "" "Cannot bind to localhost, using 127.0.0.1 as default ip\n" "%s" msgstr "不能绑定到localhost, 使用127.0.0.1作为默认的IP \n %s" #: notebook/serverapp.py:585 msgid "The port the notebook server will listen on." msgstr "notebook服务会监听的端口." #: notebook/serverapp.py:589 msgid "The number of additional ports to try if the specified port is not available." msgstr "如果指定的端口不可用,则要尝试其他端口的数量." #: notebook/serverapp.py:593 msgid "The full path to an SSL/TLS certificate file." msgstr "SSL/TLS 认证文件所在全路径." #: notebook/serverapp.py:597 msgid "The full path to a private key file for usage with SSL/TLS." msgstr "SSL/TLS 私钥文件所在全路径." #: notebook/serverapp.py:601 msgid "The full path to a certificate authority certificate for SSL/TLS client authentication." msgstr "用于SSL/TLS客户端身份验证的证书颁发机构证书的完整路径." #: notebook/serverapp.py:605 msgid "The file where the cookie secret is stored." msgstr "存放cookie密钥的文件."
#: notebook/serverapp.py:634 #, python-format msgid "Writing notebook server cookie secret to %s" msgstr "把notebook 服务cookie密码写入 %s" #: notebook/serverapp.py:641 #, python-format msgid "Could not set permissions on %s" msgstr "不能在 %s 设置权限" #: notebook/serverapp.py:646 msgid "" "Token used for authenticating first-time connections to the server.\n" "\n" " When no password is enabled,\n" " the default is to generate a new, random token.\n" "\n" " Setting to an empty string disables authentication altogether, which is NOT RECOMMENDED.\n" " " msgstr "" #: notebook/serverapp.py:656 msgid "" "One-time token used for opening a browser.\n" " Once used, this token cannot be used again.\n" " " msgstr "" #: notebook/serverapp.py:732 msgid "" "Specify Where to open the notebook on startup. This is the\n" " `new` argument passed to the standard library method `webbrowser.open`.\n" " The behaviour is not guaranteed, but depends on browser support. Valid\n" " values are:\n" " 2 opens a new tab,\n" " 1 opens a new window,\n" " 0 opens in an existing window.\n" " See the `webbrowser.open` documentation for details.\n" " " msgstr "" #: notebook/serverapp.py:752 msgid "Supply overrides for the tornado.web.Application that the Jupyter notebook uses." msgstr "" #: notebook/serverapp.py:756 msgid "" "\n" " Set the tornado compression options for websocket connections.\n" "\n" " This value will be returned from :meth:`WebSocketHandler.get_compression_options`.\n" " None (default) will disable compression.\n" " A dict (even an empty one) will enable compression.\n" "\n" " See the tornado docs for WebSocketHandler.get_compression_options for details.\n" " " msgstr "" #: notebook/serverapp.py:767 msgid "Supply overrides for terminado. Currently only supports \"shell_command\"." msgstr "" #: notebook/serverapp.py:770 msgid "Extra keyword arguments to pass to `set_secure_cookie`. See tornado's set_secure_cookie docs for details." msgstr "" #: notebook/serverapp.py:774 msgid "" "Supply SSL options for the tornado HTTPServer.\n" " See the tornado docs for details." msgstr "" #: notebook/serverapp.py:778 msgid "Supply extra arguments that will be passed to Jinja environment." msgstr "" #: notebook/serverapp.py:782 msgid "Extra variables to supply to jinja templates when rendering." msgstr "" #: notebook/serverapp.py:838 msgid "Path to search for custom.js, css" msgstr "" #: notebook/serverapp.py:850 msgid "" "Extra paths to search for serving jinja templates.\n" "\n" " Can be used to override templates from notebook.templates." msgstr "" #: notebook/serverapp.py:861 msgid "extra paths to look for Javascript notebook extensions" msgstr "" #: notebook/serverapp.py:906 #, python-format msgid "Using MathJax: %s" msgstr "" #: notebook/serverapp.py:909 msgid "The MathJax.js configuration file that is to be used." msgstr "" #: notebook/serverapp.py:914 #, python-format msgid "Using MathJax configuration file: %s" msgstr "" #: notebook/serverapp.py:920 msgid "The notebook manager class to use." msgstr "" #: notebook/serverapp.py:926 msgid "The kernel manager class to use." msgstr "" #: notebook/serverapp.py:932 msgid "The session manager class to use." msgstr "" #: notebook/serverapp.py:938 msgid "The config manager class to use" msgstr "" #: notebook/serverapp.py:959 msgid "The login handler class to use." msgstr "" #: notebook/serverapp.py:966 msgid "The logout handler class to use." 
msgstr "" #: notebook/serverapp.py:970 msgid "Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headerssent by the upstream reverse proxy. Necessary if the proxy handles SSL" msgstr "" #: notebook/serverapp.py:982 msgid "" "\n" " DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib.\n" " " msgstr "" #: notebook/serverapp.py:994 msgid "Support for specifying --pylab on the command line has been removed." msgstr "" #: notebook/serverapp.py:996 msgid "Please use `%pylab{0}` or `%matplotlib{0}` in the notebook itself." msgstr "" #: notebook/serverapp.py:1001 msgid "The directory to use for notebooks and kernels." msgstr "用于笔记本和内核的目录。" #: notebook/serverapp.py:1024 #, python-format msgid "No such notebook dir: '%r'" msgstr "没有找到路径: '%r' " #: notebook/serverapp.py:1046 msgid "Dict of Python modules to load as notebook server extensions.Entry values can be used to enable and disable the loading ofthe extensions. The extensions will be loaded in alphabetical order." msgstr "将Python模块作为笔记本服务器扩展加载。可以使用条目值来启用和禁用扩展的加载。这些扩展将以字母顺序加载。" #: notebook/serverapp.py:1055 msgid "Reraise exceptions encountered loading server extensions?" msgstr "重新运行的异常会遇到加载服务器扩展吗?" #: notebook/serverapp.py:1058 msgid "" "(msgs/sec)\n" " Maximum rate at which messages can be sent on iopub before they are\n" " limited." msgstr "" #: notebook/serverapp.py:1062 msgid "" "(bytes/sec)\n" " Maximum rate at which stream output can be sent on iopub before they are\n" " limited." msgstr "" #: notebook/serverapp.py:1066 msgid "" "(sec) Time window used to \n" " check the message and data rate limits." msgstr "(sec)时间窗口被用来 \n 检查消息和数据速率限制." #: notebook/serverapp.py:1077 #, python-format msgid "No such file or directory: %s" msgstr "找不到文件或文件夹: %s" #: notebook/serverapp.py:1147 msgid "Notebook servers are configured to only be run with a password." msgstr "服务设置为只能使用密码运行." #: notebook/serverapp.py:1148 msgid "Hint: run the following command to set a password" msgstr "提示: 运行下面命令设置密码" #: notebook/serverapp.py:1149 msgid "\t$ python -m notebook.auth password" msgstr "" #: notebook/serverapp.py:1187 #, python-format msgid "The port %i is already in use, trying another port." msgstr "端口 %i 已经被站用, 请尝试其他端口." #: notebook/serverapp.py:1190 #, python-format msgid "Permission to listen on port %i denied" msgstr "监听端口 %i 失败" #: notebook/serverapp.py:1199 msgid "ERROR: the notebook server could not be started because no available port could be found." msgstr "错误: 服务启动失败因为没有找到可用的端口. " #: notebook/serverapp.py:1205 msgid "[all ip addresses on your system]" msgstr "[系统所有IP地址]" #: notebook/serverapp.py:1229 #, python-format msgid "Terminals not available (error was %s)" msgstr "终端不可用(错误: %s)" #: notebook/serverapp.py:1265 msgid "interrupted" msgstr "中断" #: notebook/serverapp.py:1267 msgid "y" msgstr "" #: notebook/serverapp.py:1268 msgid "n" msgstr "" #: notebook/serverapp.py:1269 #, python-format msgid "Shutdown this notebook server (%s/[%s])? " msgstr "关闭服务 (%s/[%s])" #: notebook/serverapp.py:1275 msgid "Shutdown confirmed" msgstr "关闭确定" #: notebook/serverapp.py:1279 msgid "No answer for 5s:" msgstr "5s 未响应" #: notebook/serverapp.py:1280 msgid "resuming operation..." msgstr "重启操作..." 
#: notebook/serverapp.py:1288 #, python-format msgid "received signal %s, stopping" msgstr "接收信号 %s, 正在停止" #: notebook/serverapp.py:1344 #, python-format msgid "Error loading server extension %s" msgstr "加载插件 %s 失败" #: notebook/serverapp.py:1375 #, python-format msgid "Shutting down %d kernel" msgid_plural "Shutting down %d kernels" msgstr[0] "关闭 %d 服务" msgstr[1] "关闭 %d 服务" #: notebook/serverapp.py:1383 #, python-format msgid "%d active kernel" msgid_plural "%d active kernels" msgstr[0] "%d 活跃的服务" msgstr[1] "%d 活跃的服务" #: notebook/serverapp.py:1387 #, python-format msgid "" "The Jupyter Notebook is running at:\n" "%s" msgstr "本程序运行在: %s" #: notebook/serverapp.py:1434 msgid "Running as root is not recommended. Use --allow-root to bypass." msgstr "不建议以root身份运行. 使用--allow-root绕过." #: notebook/serverapp.py:1440 msgid "Use Control-C to stop this server and shut down all kernels (twice to skip confirmation)." msgstr "使用Control-C停止此服务器并关闭所有内核(连续按两次可跳过确认)." #: notebook/serverapp.py:1442 msgid "Welcome to Project Jupyter! Explore the various tools available and their corresponding documentation. If you are interested in contributing to the platform, please visit the communityresources section at http://jupyter.org/community.html." msgstr "欢迎来到项目Jupyter! 探索可用的各种工具及其相应的文档. 如果你有兴趣为这个平台做贡献, 请访问 http://jupyter.org/community.html 的community resources部分." #: notebook/serverapp.py:1453 #, python-format msgid "No web browser found: %s." msgstr "没有找到web浏览器: %s." #: notebook/serverapp.py:1458 #, python-format msgid "%s does not exist" msgstr "%s 不存在" #: notebook/serverapp.py:1492 msgid "Interrupted..." msgstr "已经中断..." #: notebook/services/contents/filemanager.py:525 #, python-format msgid "Serving notebooks from local directory: %s" msgstr "从本地目录提供notebooks服务: %s" #: notebook/services/contents/manager.py:69 msgid "Untitled" msgstr "未命名" jupyter_server-1.13.1/jupyter_server/kernelspecs/000077500000000000000000000000001415445537200223055ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/kernelspecs/__init__.py000066400000000000000000000000001415445537200244060ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/kernelspecs/handlers.py000066400000000000000000000020121415445537200244540ustar00rootroot00000000000000from tornado import web from ..base.handlers import JupyterHandler from ..services.kernelspecs.handlers import kernel_name_regex class KernelSpecResourceHandler(web.StaticFileHandler, JupyterHandler): SUPPORTED_METHODS = ("GET", "HEAD") def initialize(self): web.StaticFileHandler.initialize(self, path="") @web.authenticated def get(self, kernel_name, path, include_body=True): ksm = self.kernel_spec_manager try: self.root = ksm.get_kernel_spec(kernel_name).resource_dir except KeyError as e: raise web.HTTPError(404, u"Kernel spec %s not found" % kernel_name) from e self.log.debug("Serving kernel resource from: %s", self.root) return web.StaticFileHandler.get(self, path, include_body=include_body) @web.authenticated def head(self, kernel_name, path): return self.get(kernel_name, path, include_body=False) default_handlers = [ (r"/kernelspecs/%s/(?P<path>.*)" % kernel_name_regex, KernelSpecResourceHandler), ] jupyter_server-1.13.1/jupyter_server/log.py000066400000000000000000000035051415445537200211250ustar00rootroot00000000000000# ----------------------------------------------------------------------------- # Copyright (c) Jupyter Development Team # # Distributed under the terms of the BSD License. The full license is in # the file COPYING, distributed as part of this software.
# ----------------------------------------------------------------------------- import json from tornado.log import access_log from .prometheus.log_functions import prometheus_log_method def log_request(handler): """log a bit more information about each request than tornado's default - move static file get success to debug-level (reduces noise) - get proxied IP instead of proxy IP - log referer for redirect and failed requests - log user-agent for failed requests """ status = handler.get_status() request = handler.request try: logger = handler.log except AttributeError: logger = access_log if status < 300 or status == 304: # Successes (or 304 FOUND) are debug-level log_method = logger.debug elif status < 400: log_method = logger.info elif status < 500: log_method = logger.warning else: log_method = logger.error request_time = 1000.0 * handler.request.request_time() ns = dict( status=status, method=request.method, ip=request.remote_ip, uri=request.uri, request_time=request_time, ) msg = "{status} {method} {uri} ({ip}) {request_time:.2f}ms" if status >= 400: # log bad referers ns["referer"] = request.headers.get("Referer", "None") msg = msg + " referer={referer}" if status >= 500 and status != 502: # log all headers if it caused an error log_method(json.dumps(dict(request.headers), indent=2)) log_method(msg.format(**ns)) prometheus_log_method(handler) jupyter_server-1.13.1/jupyter_server/nbconvert/000077500000000000000000000000001415445537200217675ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/nbconvert/__init__.py000066400000000000000000000000001415445537200240660ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/nbconvert/handlers.py000066400000000000000000000142001415445537200241360ustar00rootroot00000000000000"""Tornado handlers for nbconvert.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import io import os import zipfile from ipython_genutils import text from ipython_genutils.py3compat import cast_bytes from nbformat import from_dict from tornado import web from tornado.log import app_log from ..base.handlers import FilesRedirectHandler from ..base.handlers import JupyterHandler from ..base.handlers import path_regex from jupyter_server.utils import ensure_async def find_resource_files(output_files_dir): files = [] for dirpath, dirnames, filenames in os.walk(output_files_dir): files.extend([os.path.join(dirpath, f) for f in filenames]) return files def respond_zip(handler, name, output, resources): """Zip up the output and resource files and respond with the zip file. Returns True if it has served a zip file, False if there are no resource files, in which case we serve the plain output file. 
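    A sketch of the ``resources`` mapping that triggers the zip path (the two
    keys are the ones read by the function body below; the file names and
    values here are illustrative only):

        resources = {
            "outputs": {"output_0.png": b"..."},   # extra files -> respond with a zip
            "output_extension": ".html",           # names the main exported file
        }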
""" # Check if we have resource files we need to zip output_files = resources.get("outputs", None) if not output_files: return False # Headers zip_filename = os.path.splitext(name)[0] + ".zip" handler.set_attachment_header(zip_filename) handler.set_header("Content-Type", "application/zip") handler.set_header("Cache-Control", "no-store, no-cache, must-revalidate, max-age=0") # Prepare the zip file buffer = io.BytesIO() zipf = zipfile.ZipFile(buffer, mode="w", compression=zipfile.ZIP_DEFLATED) output_filename = os.path.splitext(name)[0] + resources["output_extension"] zipf.writestr(output_filename, cast_bytes(output, "utf-8")) for filename, data in output_files.items(): zipf.writestr(os.path.basename(filename), data) zipf.close() handler.finish(buffer.getvalue()) return True def get_exporter(format, **kwargs): """get an exporter, raising appropriate errors""" # if this fails, will raise 500 try: from nbconvert.exporters.base import get_exporter except ImportError as e: raise web.HTTPError(500, "Could not import nbconvert: %s" % e) from e try: Exporter = get_exporter(format) except KeyError as e: # should this be 400? raise web.HTTPError(404, u"No exporter for format: %s" % format) from e try: return Exporter(**kwargs) except Exception as e: app_log.exception("Could not construct Exporter: %s", Exporter) raise web.HTTPError(500, "Could not construct Exporter: %s" % e) from e class NbconvertFileHandler(JupyterHandler): SUPPORTED_METHODS = ("GET",) @web.authenticated async def get(self, format, path): self.check_xsrf_cookie() exporter = get_exporter(format, config=self.config, log=self.log) path = path.strip("/") # If the notebook relates to a real file (default contents manager), # give its path to nbconvert. if hasattr(self.contents_manager, "_get_os_path"): os_path = self.contents_manager._get_os_path(path) ext_resources_dir, basename = os.path.split(os_path) else: ext_resources_dir = None model = await ensure_async(self.contents_manager.get(path=path)) name = model["name"] if model["type"] != "notebook": # not a notebook, redirect to files return FilesRedirectHandler.redirect_to_files(self, path) nb = model["content"] self.set_header("Last-Modified", model["last_modified"]) # create resources dictionary mod_date = model["last_modified"].strftime(text.date_format) nb_title = os.path.splitext(name)[0] resource_dict = { "metadata": {"name": nb_title, "modified_date": mod_date}, "config_dir": self.application.settings["config_dir"], } if ext_resources_dir: resource_dict["metadata"]["path"] = ext_resources_dir try: output, resources = exporter.from_notebook_node(nb, resources=resource_dict) except Exception as e: self.log.exception("nbconvert failed: %s", e) raise web.HTTPError(500, "nbconvert failed: %s" % e) from e if respond_zip(self, name, output, resources): return # Force download if requested if self.get_argument("download", "false").lower() == "true": filename = os.path.splitext(name)[0] + resources["output_extension"] self.set_attachment_header(filename) # MIME type if exporter.output_mimetype: self.set_header("Content-Type", "%s; charset=utf-8" % exporter.output_mimetype) self.set_header("Cache-Control", "no-store, no-cache, must-revalidate, max-age=0") self.finish(output) class NbconvertPostHandler(JupyterHandler): SUPPORTED_METHODS = ("POST",) @web.authenticated def post(self, format): exporter = get_exporter(format, config=self.config) model = self.get_json_body() name = model.get("name", "notebook.ipynb") nbnode = from_dict(model["content"]) try: output, resources = 
exporter.from_notebook_node( nbnode, resources={ "metadata": { "name": name[: name.rfind(".")], }, "config_dir": self.application.settings["config_dir"], }, ) except Exception as e: raise web.HTTPError(500, "nbconvert failed: %s" % e) from e if respond_zip(self, name, output, resources): return # MIME type if exporter.output_mimetype: self.set_header("Content-Type", "%s; charset=utf-8" % exporter.output_mimetype) self.finish(output) # ----------------------------------------------------------------------------- # URL to handler mappings # ----------------------------------------------------------------------------- _format_regex = r"(?P<format>\w+)" default_handlers = [ (r"/nbconvert/%s" % _format_regex, NbconvertPostHandler), (r"/nbconvert/%s%s" % (_format_regex, path_regex), NbconvertFileHandler), ] jupyter_server-1.13.1/jupyter_server/prometheus/000077500000000000000000000000001415445537200221625ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/prometheus/__init__.py000066400000000000000000000000001415445537200242610ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/prometheus/log_functions.py000066400000000000000000000017621415445537200254130ustar00rootroot00000000000000from .metrics import HTTP_REQUEST_DURATION_SECONDS def prometheus_log_method(handler): """ Tornado log handler for recording RED metrics. We record the following metrics: Rate - the number of requests, per second, your services are serving. Errors - the number of failed requests per second. Duration - The amount of time each request takes expressed as a time interval. We use a fully qualified name of the handler as a label, rather than every url path to reduce cardinality. This function should be either the value of or called from a function that is the 'log_function' tornado setting. This makes it get called at the end of every request, allowing us to record the metrics we need. """ HTTP_REQUEST_DURATION_SECONDS.labels( method=handler.request.method, handler="{}.{}".format(handler.__class__.__module__, type(handler).__name__), status_code=handler.get_status(), ).observe(handler.request.request_time()) jupyter_server-1.13.1/jupyter_server/prometheus/metrics.py000066400000000000000000000022451415445537200242050ustar00rootroot00000000000000""" Prometheus metrics exported by Jupyter Server Read https://prometheus.io/docs/practices/naming/ for naming conventions for metrics & labels. """ try: # Jupyter Notebook also defines these metrics. Re-defining them results in a ValueError. # Try to de-duplicate by using the ones in Notebook if available. # See https://github.com/jupyter/jupyter_server/issues/209 # pylint: disable=unused-import from notebook.prometheus.metrics import ( HTTP_REQUEST_DURATION_SECONDS, TERMINAL_CURRENTLY_RUNNING_TOTAL, KERNEL_CURRENTLY_RUNNING_TOTAL, ) except ImportError: from prometheus_client import Histogram, Gauge HTTP_REQUEST_DURATION_SECONDS = Histogram( "http_request_duration_seconds", "duration in seconds for all HTTP requests", ["method", "handler", "status_code"], ) TERMINAL_CURRENTLY_RUNNING_TOTAL = Gauge( "terminal_currently_running_total", "counter for how many terminals are running", ) KERNEL_CURRENTLY_RUNNING_TOTAL = Gauge( "kernel_currently_running_total", "counter for how many kernels are running labeled by type", ["type"], ) jupyter_server-1.13.1/jupyter_server/pytest_plugin.py000066400000000000000000000363451415445537200232550ustar00rootroot00000000000000# Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License.
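# A downstream project typically enables the fixtures in this module from its
# own conftest.py; a minimal sketch (the plugin path below is this module,
# the conftest placement is an assumption about the consumer's layout):
#
#     # conftest.py
#     pytest_plugins = ["jupyter_server.pytest_plugin"]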
import io import json import logging import os import shutil import sys import urllib.parse from binascii import hexlify import jupyter_core.paths import nbformat import pytest import tornado from tornado.escape import url_escape from traitlets.config import Config from jupyter_server.extension import serverextension from jupyter_server.serverapp import ServerApp from jupyter_server.services.contents.filemanager import FileContentsManager from jupyter_server.services.contents.largefilemanager import LargeFileManager from jupyter_server.utils import url_path_join # List of dependencies needed for this plugin. pytest_plugins = [ "pytest_tornasync", # Once the chunk below moves to Jupyter Core, we'll uncomment # This plugin and use the fixtures directly from Jupyter Core. # "jupyter_core.pytest_plugin" ] import asyncio if os.name == "nt" and sys.version_info >= (3, 7): asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) # ============ Move to Jupyter Core ============= def mkdir(tmp_path, *parts): path = tmp_path.joinpath(*parts) if not path.exists(): path.mkdir(parents=True) return path @pytest.fixture def jp_home_dir(tmp_path): """Provides a temporary HOME directory value.""" return mkdir(tmp_path, "home") @pytest.fixture def jp_data_dir(tmp_path): """Provides a temporary Jupyter data dir directory value.""" return mkdir(tmp_path, "data") @pytest.fixture def jp_config_dir(tmp_path): """Provides a temporary Jupyter config dir directory value.""" return mkdir(tmp_path, "config") @pytest.fixture def jp_runtime_dir(tmp_path): """Provides a temporary Jupyter runtime dir directory value.""" return mkdir(tmp_path, "runtime") @pytest.fixture def jp_system_jupyter_path(tmp_path): """Provides a temporary Jupyter system path value.""" return mkdir(tmp_path, "share", "jupyter") @pytest.fixture def jp_env_jupyter_path(tmp_path): """Provides a temporary Jupyter env system path value.""" return mkdir(tmp_path, "env", "share", "jupyter") @pytest.fixture def jp_system_config_path(tmp_path): """Provides a temporary Jupyter config path value.""" return mkdir(tmp_path, "etc", "jupyter") @pytest.fixture def jp_env_config_path(tmp_path): """Provides a temporary Jupyter env config path value.""" return mkdir(tmp_path, "env", "etc", "jupyter") @pytest.fixture def jp_environ( monkeypatch, tmp_path, jp_home_dir, jp_data_dir, jp_config_dir, jp_runtime_dir, jp_system_jupyter_path, jp_system_config_path, jp_env_jupyter_path, jp_env_config_path, ): """Configures a temporary environment based on Jupyter-specific environment variables. """ monkeypatch.setenv("HOME", str(jp_home_dir)) monkeypatch.setenv("PYTHONPATH", os.pathsep.join(sys.path)) # monkeypatch.setenv("JUPYTER_NO_CONFIG", "1") monkeypatch.setenv("JUPYTER_CONFIG_DIR", str(jp_config_dir)) monkeypatch.setenv("JUPYTER_DATA_DIR", str(jp_data_dir)) monkeypatch.setenv("JUPYTER_RUNTIME_DIR", str(jp_runtime_dir)) monkeypatch.setattr(jupyter_core.paths, "SYSTEM_JUPYTER_PATH", [str(jp_system_jupyter_path)]) monkeypatch.setattr(jupyter_core.paths, "ENV_JUPYTER_PATH", [str(jp_env_jupyter_path)]) monkeypatch.setattr(jupyter_core.paths, "SYSTEM_CONFIG_PATH", [str(jp_system_config_path)]) monkeypatch.setattr(jupyter_core.paths, "ENV_CONFIG_PATH", [str(jp_env_config_path)]) # ================= End: Move to Jupyter core ================ @pytest.fixture def jp_server_config(): """Allows tests to setup their specific configuration values. 
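    By default this is an empty dict. Override the fixture in a test module to
    pass traitlets configuration to the server under test; a minimal sketch
    (the ``ServerApp.allow_origin`` option is only an illustrative choice):

    .. code-block:: python

        @pytest.fixture
        def jp_server_config():
            return {"ServerApp": {"allow_origin": "*"}}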
""" return {} @pytest.fixture def jp_root_dir(tmp_path): """Provides a temporary Jupyter root directory value.""" return mkdir(tmp_path, "root_dir") @pytest.fixture def jp_template_dir(tmp_path): """Provides a temporary Jupyter templates directory value.""" return mkdir(tmp_path, "templates") @pytest.fixture def jp_argv(): """Allows tests to setup specific argv values. """ return [] @pytest.fixture def jp_extension_environ(jp_env_config_path, monkeypatch): """Monkeypatch a Jupyter Extension's config path into each test's environment variable""" monkeypatch.setattr(serverextension, "ENV_CONFIG_PATH", [str(jp_env_config_path)]) @pytest.fixture def jp_http_port(http_server_port): """Returns the port value from the http_server_port fixture. """ return http_server_port[-1] @pytest.fixture def jp_nbconvert_templates(jp_data_dir): """Setups up a temporary directory consisting of the nbconvert templates.""" # Get path to nbconvert template directory *before* # monkeypatching the paths env variable via the jp_environ fixture. possible_paths = jupyter_core.paths.jupyter_path("nbconvert", "templates") nbconvert_path = None for path in possible_paths: if os.path.exists(path): nbconvert_path = path break nbconvert_target = jp_data_dir / "nbconvert" / "templates" # copy nbconvert templates to new tmp data_dir. if nbconvert_path: shutil.copytree(nbconvert_path, str(nbconvert_target)) @pytest.fixture def jp_logging_stream(): """StringIO stream intended to be used by the core Jupyter ServerApp logger's default StreamHandler. This helps avoid collision with stdout which is hijacked by Pytest. """ logging_stream = io.StringIO() yield logging_stream output = logging_stream.getvalue() # If output exists, print it. if output: print(output) return output @pytest.fixture(scope="function") def jp_configurable_serverapp( jp_nbconvert_templates, # this fixture must preceed jp_environ jp_environ, jp_server_config, jp_argv, jp_http_port, jp_base_url, tmp_path, jp_root_dir, io_loop, jp_logging_stream, ): """Starts a Jupyter Server instance based on the provided configuration values. The fixture is a factory; it can be called like a function inside a unit test. Here's a basic example of how use this fixture: .. code-block:: python def my_test(jp_configurable_serverapp): app = jp_configurable_serverapp(...) ... """ ServerApp.clear_instance() def _configurable_serverapp( config=jp_server_config, base_url=jp_base_url, argv=jp_argv, environ=jp_environ, http_port=jp_http_port, tmp_path=tmp_path, root_dir=jp_root_dir, **kwargs ): c = Config(config) c.NotebookNotary.db_file = ":memory:" token = hexlify(os.urandom(4)).decode("ascii") app = ServerApp.instance( # Set the log level to debug for testing purposes log_level="DEBUG", port=http_port, port_retries=0, open_browser=False, root_dir=str(root_dir), base_url=base_url, config=c, allow_root=True, token=token, **kwargs ) app.init_signal = lambda: None app.log.propagate = True app.log.handlers = [] # Initialize app without httpserver app.initialize(argv=argv, new_httpserver=False) # Reroute all logging StreamHandlers away from stdin/stdout since pytest hijacks # these streams and closes them at unfortunate times. 
stream_handlers = [h for h in app.log.handlers if isinstance(h, logging.StreamHandler)] for handler in stream_handlers: handler.setStream(jp_logging_stream) app.log.propagate = True app.log.handlers = [] # Start app without ioloop app.start_app() return app return _configurable_serverapp @pytest.fixture def jp_ensure_app_fixture(request): """Ensures that the 'app' fixture used by pytest-tornasync is set to `jp_web_app`, the Tornado Web Application returned by the ServerApp in Jupyter Server, provided by the jp_web_app fixture in this module. Note, this hardcodes the `app_fixture` option from pytest-tornasync to `jp_web_app`. If this value is configured to something other than the default, it will raise an exception. """ app_option = request.config.getoption("app_fixture") if app_option not in ["app", "jp_web_app"]: raise Exception( "jp_serverapp requires the `app-fixture` option " "to be set to `jp_web_app`. Try rerunning the " "current tests with the option `--app-fixture " "jp_web_app`." ) elif app_option == "app": # Manually set the app_fixture to `jp_web_app` if it's # not set already. request.config.option.app_fixture = "jp_web_app" @pytest.fixture(scope="function") def jp_serverapp(jp_ensure_app_fixture, jp_server_config, jp_argv, jp_configurable_serverapp): """Starts a Jupyter Server instance based on the established configuration values.""" app = jp_configurable_serverapp(config=jp_server_config, argv=jp_argv) yield app app.remove_server_info_file() app.remove_browser_open_files() @pytest.fixture def jp_web_app(jp_serverapp): """app fixture is needed by pytest_tornasync plugin""" return jp_serverapp.web_app @pytest.fixture def jp_auth_header(jp_serverapp): """Configures an authorization header using the token from the serverapp fixture.""" return {"Authorization": "token {token}".format(token=jp_serverapp.token)} @pytest.fixture def jp_base_url(): """Returns the base url to use for the test.""" return "/a%40b/" @pytest.fixture def jp_fetch(jp_serverapp, http_server_client, jp_auth_header, jp_base_url): """Sends an (asynchronous) HTTP request to a test server. The fixture is a factory; it can be called like a function inside a unit test. Here's a basic example of how to use this fixture: .. code-block:: python async def my_test(jp_fetch): response = await jp_fetch("api", "spec.yaml") ... """ def client_fetch(*parts, headers=None, params=None, **kwargs): if not headers: headers = {} if not params: params = {} # Handle URL strings path_url = url_escape(url_path_join(*parts), plus=False) base_path_url = url_path_join(jp_base_url, path_url) params_url = urllib.parse.urlencode(params) url = base_path_url + "?" + params_url # Add auth keys to header headers.update(jp_auth_header) # Make request. return http_server_client.fetch(url, headers=headers, request_timeout=20, **kwargs) return client_fetch @pytest.fixture def jp_ws_fetch(jp_serverapp, http_server_client, jp_auth_header, jp_http_port, jp_base_url): """Sends a websocket request to a test server. The fixture is a factory; it can be called like a function inside a unit test. Here's a basic example of how to use this fixture: .. code-block:: python async def my_test(jp_fetch, jp_ws_fetch): # Start a kernel r = await jp_fetch( 'api', 'kernels', method='POST', body=json.dumps({ 'name': "python3" }) ) kid = json.loads(r.body.decode())['id'] # Open a websocket connection. ws = await jp_ws_fetch( 'api', 'kernels', kid, 'channels' ) ...
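            # When finished, close the connection (cleanup sketch; `ws` is the
            # websocket connection opened above)
            ws.close()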
""" def client_fetch(*parts, headers=None, params=None, **kwargs): if not headers: headers = {} if not params: params = {} # Handle URL strings path_url = url_escape(url_path_join(*parts), plus=False) base_path_url = url_path_join(jp_base_url, path_url) urlparts = urllib.parse.urlparse("ws://localhost:{}".format(jp_http_port)) urlparts = urlparts._replace(path=base_path_url, query=urllib.parse.urlencode(params)) url = urlparts.geturl() # Add auth keys to header headers.update(jp_auth_header) # Make request. req = tornado.httpclient.HTTPRequest(url, headers=headers, connect_timeout=120) return tornado.websocket.websocket_connect(req) return client_fetch some_resource = u"The very model of a modern major general" sample_kernel_json = { "argv": ["cat", "{connection_file}"], "display_name": "Test kernel", } @pytest.fixture def jp_kernelspecs(jp_data_dir): """Configures some sample kernelspecs in the Jupyter data directory.""" spec_names = ["sample", "sample 2", "bad"] for name in spec_names: sample_kernel_dir = jp_data_dir.joinpath("kernels", name) sample_kernel_dir.mkdir(parents=True) # Create kernel json file sample_kernel_file = sample_kernel_dir.joinpath("kernel.json") kernel_json = sample_kernel_json.copy() if name == "bad": kernel_json["argv"] = ["non_existent_path"] sample_kernel_file.write_text(json.dumps(kernel_json)) # Create resources text sample_kernel_resources = sample_kernel_dir.joinpath("resource.txt") sample_kernel_resources.write_text(some_resource) @pytest.fixture(params=[True, False]) def jp_contents_manager(request, tmp_path): """Returns a FileContentsManager instance based on the use_atomic_writing parameter value.""" return FileContentsManager(root_dir=str(tmp_path), use_atomic_writing=request.param) @pytest.fixture def jp_large_contents_manager(tmp_path): """Returns a LargeFileManager instance.""" return LargeFileManager(root_dir=str(tmp_path)) @pytest.fixture def jp_create_notebook(jp_root_dir): """Creates a notebook in the test's home directory.""" def inner(nbpath): nbpath = jp_root_dir.joinpath(nbpath) # Check that the notebook has the correct file extension. if nbpath.suffix != ".ipynb": raise Exception("File extension for notebook must be .ipynb") # If the notebook path has a parent directory, make sure it's created. parent = nbpath.parent parent.mkdir(parents=True, exist_ok=True) # Create a notebook string and write to file. nb = nbformat.v4.new_notebook() nbtext = nbformat.writes(nb, version=4) nbpath.write_text(nbtext) return inner @pytest.fixture(autouse=True) def jp_server_cleanup(): yield ServerApp.clear_instance() @pytest.fixture def jp_cleanup_subprocesses(jp_serverapp): """Clean up subprocesses started by a Jupyter Server, i.e. kernels and terminal.""" async def _(): terminal_cleanup = jp_serverapp.web_app.settings["terminal_manager"].terminate_all kernel_cleanup = jp_serverapp.kernel_manager.shutdown_all if asyncio.iscoroutinefunction(terminal_cleanup): try: await terminal_cleanup() except Exception as e: print(e) else: try: await terminal_cleanup() except Exception as e: print(e) if asyncio.iscoroutinefunction(kernel_cleanup): try: await kernel_cleanup() except Exception as e: print(e) else: try: kernel_cleanup() except Exception as e: print(e) return _ jupyter_server-1.13.1/jupyter_server/serverapp.py000066400000000000000000002773121415445537200223640ustar00rootroot00000000000000# coding: utf-8 """A tornado based Jupyter server.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
import binascii import datetime import errno import gettext import hashlib import hmac import inspect import io import ipaddress import json import logging import mimetypes import os import pathlib import random import re import select import signal import socket import stat import sys import threading import time import urllib import webbrowser from base64 import encodebytes try: import resource except ImportError: # Windows resource = None from jinja2 import Environment, FileSystemLoader from jupyter_core.paths import secure_write from jupyter_server.transutils import trans, _i18n from jupyter_server.utils import run_sync_in_loop, urljoin, pathname2url # the minimum viable tornado version: needs to be kept in sync with setup.py MIN_TORNADO = (6, 1, 0) try: import tornado assert tornado.version_info >= MIN_TORNADO except (ImportError, AttributeError, AssertionError) as e: # pragma: no cover raise ImportError(_i18n("The Jupyter Server requires tornado >=%s.%s.%s") % MIN_TORNADO) from e from tornado import httpserver from tornado import ioloop from tornado import web from tornado.httputil import url_concat from tornado.log import LogFormatter, app_log, access_log, gen_log if not sys.platform.startswith("win"): from tornado.netutil import bind_unix_socket from jupyter_server import ( DEFAULT_JUPYTER_SERVER_PORT, DEFAULT_STATIC_FILES_PATH, DEFAULT_TEMPLATE_PATH_LIST, __version__, ) from jupyter_server.base.handlers import MainHandler, RedirectWithParams, Template404 from jupyter_server.log import log_request from jupyter_server.services.kernels.kernelmanager import ( MappingKernelManager, AsyncMappingKernelManager, ) from jupyter_server.services.config import ConfigManager from jupyter_server.services.contents.manager import AsyncContentsManager, ContentsManager from jupyter_server.services.contents.filemanager import ( AsyncFileContentsManager, FileContentsManager, ) from jupyter_server.services.contents.largefilemanager import LargeFileManager from jupyter_server.services.sessions.sessionmanager import SessionManager from jupyter_server.gateway.managers import ( GatewayMappingKernelManager, GatewayKernelSpecManager, GatewaySessionManager, GatewayClient, ) from jupyter_server.auth.login import LoginHandler from jupyter_server.auth.logout import LogoutHandler from jupyter_server.base.handlers import FileFindHandler from traitlets.config import Config from traitlets.config.application import catch_config_error, boolean_flag from jupyter_core.application import ( JupyterApp, base_flags, base_aliases, ) from jupyter_core.paths import jupyter_config_path from jupyter_client import KernelManager from jupyter_client.kernelspec import KernelSpecManager from jupyter_client.session import Session from nbformat.sign import NotebookNotary from traitlets import ( Any, Dict, Unicode, Integer, List, Bool, Bytes, Instance, TraitError, Type, Float, observe, default, validate, ) from jupyter_core.paths import jupyter_runtime_dir from jupyter_server._sysinfo import get_sys_info from jupyter_server._tz import utcnow from jupyter_server.utils import ( url_path_join, check_pid, url_escape, pathname2url, unix_socket_in_use, urlencode_unix_socket_path, fetch, ) from jupyter_server.extension.serverextension import ServerExtensionApp from jupyter_server.extension.manager import ExtensionManager from jupyter_server.extension.config import ExtensionConfigManager from jupyter_server.traittypes import TypeFromClasses # Tolerate missing terminado package. 
try: from jupyter_server.terminal import TerminalManager terminado_available = True except ImportError: terminado_available = False # ----------------------------------------------------------------------------- # Module globals # ----------------------------------------------------------------------------- _examples = """ jupyter server # start the server jupyter server --certfile=mycert.pem # use SSL/TLS certificate jupyter server password # enter a password to protect the server """ JUPYTER_SERVICE_HANDLERS = dict( auth=None, api=["jupyter_server.services.api.handlers"], config=["jupyter_server.services.config.handlers"], contents=["jupyter_server.services.contents.handlers"], files=["jupyter_server.files.handlers"], kernels=["jupyter_server.services.kernels.handlers"], kernelspecs=[ "jupyter_server.kernelspecs.handlers", "jupyter_server.services.kernelspecs.handlers", ], nbconvert=["jupyter_server.nbconvert.handlers", "jupyter_server.services.nbconvert.handlers"], security=["jupyter_server.services.security.handlers"], sessions=["jupyter_server.services.sessions.handlers"], shutdown=["jupyter_server.services.shutdown"], view=["jupyter_server.view.handlers"], ) # Added for backwards compatibility from classic notebook server. DEFAULT_SERVER_PORT = DEFAULT_JUPYTER_SERVER_PORT # ----------------------------------------------------------------------------- # Helper functions # ----------------------------------------------------------------------------- def random_ports(port, n): """Generate a list of n random ports near the given port. The first 5 ports will be sequential, and the remaining n-5 will be randomly selected in the range [port-2*n, port+2*n]. """ for i in range(min(5, n)): yield port + i for i in range(n - 5): yield max(1, port + random.randint(-2 * n, 2 * n)) def load_handlers(name): """Load the (URL pattern, handler) tuples for each component.""" mod = __import__(name, fromlist=["default_handlers"]) return mod.default_handlers # ----------------------------------------------------------------------------- # The Tornado web application # ----------------------------------------------------------------------------- class ServerWebApplication(web.Application): def __init__( self, jupyter_app, default_services, kernel_manager, contents_manager, session_manager, kernel_spec_manager, config_manager, extra_services, log, base_url, default_url, settings_overrides, jinja_env_options, ): settings = self.init_settings( jupyter_app, kernel_manager, contents_manager, session_manager, kernel_spec_manager, config_manager, extra_services, log, base_url, default_url, settings_overrides, jinja_env_options, ) handlers = self.init_handlers(default_services, settings) super(ServerWebApplication, self).__init__(handlers, **settings) def init_settings( self, jupyter_app, kernel_manager, contents_manager, session_manager, kernel_spec_manager, config_manager, extra_services, log, base_url, default_url, settings_overrides, jinja_env_options=None, ): _template_path = settings_overrides.get( "template_path", jupyter_app.template_file_path, ) if isinstance(_template_path, str): _template_path = (_template_path,) template_path = [os.path.expanduser(path) for path in _template_path] jenv_opt = {"autoescape": True} jenv_opt.update(jinja_env_options if jinja_env_options else {}) env = Environment( loader=FileSystemLoader(template_path), extensions=["jinja2.ext.i18n"], **jenv_opt ) sys_info = get_sys_info() # If the user is running the server in a git directory, make the assumption # that this is a dev 
        # install and suggest to the developer `npm run build:watch`.
        base_dir = os.path.realpath(os.path.join(__file__, "..", ".."))
        dev_mode = os.path.exists(os.path.join(base_dir, ".git"))

        nbui = gettext.translation(
            "nbui", localedir=os.path.join(base_dir, "jupyter_server/i18n"), fallback=True
        )
        env.install_gettext_translations(nbui, newstyle=False)

        if sys_info["commit_source"] == "repository":
            # don't cache (rely on 304) when working from master
            version_hash = ""
        else:
            # reset the cache on server restart
            version_hash = datetime.datetime.now().strftime("%Y%m%d%H%M%S")

        now = utcnow()

        root_dir = contents_manager.root_dir
        home = os.path.expanduser("~")
        if root_dir.startswith(home + os.path.sep):
            # collapse $HOME to ~
            root_dir = "~" + root_dir[len(home) :]

        settings = dict(
            # basics
            log_function=log_request,
            base_url=base_url,
            default_url=default_url,
            template_path=template_path,
            static_path=jupyter_app.static_file_path,
            static_custom_path=jupyter_app.static_custom_path,
            static_handler_class=FileFindHandler,
            static_url_prefix=url_path_join(base_url, "/static/"),
            static_handler_args={
                # don't cache custom.js
                "no_cache_paths": [url_path_join(base_url, "static", "custom")],
            },
            version_hash=version_hash,
            # rate limits
            iopub_msg_rate_limit=jupyter_app.iopub_msg_rate_limit,
            iopub_data_rate_limit=jupyter_app.iopub_data_rate_limit,
            rate_limit_window=jupyter_app.rate_limit_window,
            # authentication
            cookie_secret=jupyter_app.cookie_secret,
            login_url=url_path_join(base_url, "/login"),
            login_handler_class=jupyter_app.login_handler_class,
            logout_handler_class=jupyter_app.logout_handler_class,
            password=jupyter_app.password,
            xsrf_cookies=True,
            disable_check_xsrf=jupyter_app.disable_check_xsrf,
            allow_remote_access=jupyter_app.allow_remote_access,
            local_hostnames=jupyter_app.local_hostnames,
            authenticate_prometheus=jupyter_app.authenticate_prometheus,
            # managers
            kernel_manager=kernel_manager,
            contents_manager=contents_manager,
            session_manager=session_manager,
            kernel_spec_manager=kernel_spec_manager,
            config_manager=config_manager,
            # handlers
            extra_services=extra_services,
            # Jupyter stuff
            started=now,
            # place for extensions to register activity
            # so that they can prevent idle-shutdown
            last_activity_times={},
            jinja_template_vars=jupyter_app.jinja_template_vars,
            websocket_url=jupyter_app.websocket_url,
            shutdown_button=jupyter_app.quit_button,
            config=jupyter_app.config,
            config_dir=jupyter_app.config_dir,
            allow_password_change=jupyter_app.allow_password_change,
            server_root_dir=root_dir,
            jinja2_env=env,
            terminals_available=terminado_available and jupyter_app.terminals_enabled,
            serverapp=jupyter_app,
        )

        # allow custom overrides for the tornado web app.
        settings.update(settings_overrides)

        if base_url and "xsrf_cookie_kwargs" not in settings:
            # default: set xsrf cookie on base_url
            settings["xsrf_cookie_kwargs"] = {"path": base_url}
        return settings

    def init_handlers(self, default_services, settings):
        """Load the (URL pattern, handler) tuples for each component."""
        # Order matters. The first handler to match the URL will handle the request.
        handlers = []
        # load extra services specified by users before default handlers
        for service in settings["extra_services"]:
            handlers.extend(load_handlers(service))

        # Add auth services.
        if "auth" in default_services:
            handlers.extend([(r"/login", settings["login_handler_class"])])
            handlers.extend([(r"/logout", settings["logout_handler_class"])])

        # Load default services. Raise exception if service not
        # found in JUPYTER_SERVICE_HANDLERS.
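        # For example (illustrative sketch; the module path is hypothetical),
        # an unrecognized name like "metrics" would raise below, so custom
        # handler modules belong in `extra_services` instead:
        #
        #     c.ServerApp.extra_services = ["my_org.metrics.handlers"]
        #
        # where the module exposes the `default_handlers` list of
        # (URL pattern, handler) tuples that load_handlers() expects.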
for service in default_services: if service in JUPYTER_SERVICE_HANDLERS: locations = JUPYTER_SERVICE_HANDLERS[service] if locations is not None: for loc in locations: handlers.extend(load_handlers(loc)) else: raise Exception( "{} is not recognized as a jupyter_server " "service. If this is a custom service, " "try adding it to the " "`extra_services` list.".format(service) ) # Add extra handlers from contents manager. handlers.extend(settings["contents_manager"].get_extra_handlers()) # If gateway mode is enabled, replace appropriate handlers to perform redirection if GatewayClient.instance().gateway_enabled: # for each handler required for gateway, locate its pattern # in the current list and replace that entry... gateway_handlers = load_handlers("jupyter_server.gateway.handlers") for i, gwh in enumerate(gateway_handlers): for j, h in enumerate(handlers): if gwh[0] == h[0]: handlers[j] = (gwh[0], gwh[1]) break # register base handlers last handlers.extend(load_handlers("jupyter_server.base.handlers")) if settings["default_url"] != settings["base_url"]: # set the URL that will be redirected from `/` handlers.append( ( r"/?", RedirectWithParams, { "url": settings["default_url"], "permanent": False, # want 302, not 301 }, ) ) else: handlers.append((r"/", MainHandler)) # prepend base_url onto the patterns that we match new_handlers = [] for handler in handlers: pattern = url_path_join(settings["base_url"], handler[0]) new_handler = tuple([pattern] + list(handler[1:])) new_handlers.append(new_handler) # add 404 on the end, which will catch everything that falls through new_handlers.append((r"(.*)", Template404)) return new_handlers def last_activity(self): """Get a UTC timestamp for when the server last did something. Includes: API activity, kernel activity, kernel shutdown, and terminal activity. """ sources = [ self.settings["started"], self.settings["kernel_manager"].last_kernel_activity, ] try: sources.append(self.settings["api_last_activity"]) except KeyError: pass try: sources.append(self.settings["terminal_last_activity"]) except KeyError: pass sources.extend(self.settings["last_activity_times"].values()) return max(sources) class JupyterPasswordApp(JupyterApp): """Set a password for the Jupyter server. Setting a password secures the Jupyter server and removes the need for token-based authentication. """ description = __doc__ def _config_file_default(self): return os.path.join(self.config_dir, "jupyter_server_config.json") def start(self): from jupyter_server.auth.security import set_password set_password(config_file=self.config_file) self.log.info("Wrote hashed password to %s" % self.config_file) def shutdown_server(server_info, timeout=5, log=None): """Shutdown a Jupyter server in a separate process. *server_info* should be a dictionary as produced by list_running_servers(). Will first try to request shutdown using /api/shutdown . On Unix, if the server is still running after *timeout* seconds, it will send SIGTERM. After another timeout, it escalates to SIGKILL. Returns True if the server was stopped by any means, False if stopping it failed (on Windows). """ from tornado.httpclient import HTTPClient, HTTPRequest url = server_info["url"] pid = server_info["pid"] if log: log.debug("POST request to %sapi/shutdown", url) r = fetch(url, method="POST", headers={"Authorization": "token " + server_info["token"]}) # Poll to see if it shut down. 
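    # check_pid is polled at 10 Hz here and again after SIGTERM, so each
    # escalation step (HTTP shutdown, SIGTERM, SIGKILL) waits roughly
    # `timeout` seconds before moving on.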
    for _ in range(timeout * 10):
        if not check_pid(pid):
            if log:
                log.debug("Server PID %s is gone", pid)
            return True
        time.sleep(0.1)

    if sys.platform.startswith("win"):
        return False

    if log:
        log.debug("SIGTERM to PID %s", pid)
    os.kill(pid, signal.SIGTERM)

    # Poll to see if it shut down.
    for _ in range(timeout * 10):
        if not check_pid(pid):
            if log:
                log.debug("Server PID %s is gone", pid)
            return True
        time.sleep(0.1)

    if log:
        log.debug("SIGKILL to PID %s", pid)
    os.kill(pid, signal.SIGKILL)
    return True  # SIGKILL cannot be caught


class JupyterServerStopApp(JupyterApp):

    version = __version__
    description = "Stop currently running Jupyter server for a given port"

    port = Integer(
        DEFAULT_JUPYTER_SERVER_PORT,
        config=True,
        help="Port of the server to be killed. Default %s" % DEFAULT_JUPYTER_SERVER_PORT,
    )

    sock = Unicode(u"", config=True, help="UNIX socket of the server to be killed.")

    def parse_command_line(self, argv=None):
        super(JupyterServerStopApp, self).parse_command_line(argv)
        if self.extra_args:
            try:
                self.port = int(self.extra_args[0])
            except ValueError:
                # self.extra_args[0] was not an int, so it must be a string (unix socket).
                self.sock = self.extra_args[0]

    def shutdown_server(self, server):
        return shutdown_server(server, log=self.log)

    def _shutdown_or_exit(self, target_endpoint, server):
        print("Shutting down server on %s..." % target_endpoint)
        if not self.shutdown_server(server):
            sys.exit("Could not stop server on %s" % target_endpoint)

    @staticmethod
    def _maybe_remove_unix_socket(socket_path):
        try:
            os.unlink(socket_path)
        except (OSError, IOError):
            pass

    def start(self):
        servers = list(list_running_servers(self.runtime_dir, log=self.log))
        if not servers:
            self.exit("There are no running servers (per %s)" % self.runtime_dir)
        for server in servers:
            if self.sock:
                sock = server.get("sock", None)
                if sock and sock == self.sock:
                    self._shutdown_or_exit(sock, server)
                    # Attempt to remove the UNIX socket after stopping.
                    self._maybe_remove_unix_socket(sock)
                    return
            elif self.port:
                port = server.get("port", None)
                if port == self.port:
                    self._shutdown_or_exit(port, server)
                    return
        current_endpoint = self.sock or self.port
        print(
            "There is currently no server running on {}".format(current_endpoint), file=sys.stderr
        )
        print("Ports/sockets currently in use:", file=sys.stderr)
        for server in servers:
            print(" - {}".format(server.get("sock") or server["port"]), file=sys.stderr)
        self.exit(1)


class JupyterServerListApp(JupyterApp):
    version = __version__
    description = _i18n("List currently running Jupyter servers.")

    flags = dict(
        jsonlist=(
            {"JupyterServerListApp": {"jsonlist": True}},
            _i18n("Produce machine-readable JSON list output."),
        ),
        json=(
            {"JupyterServerListApp": {"json": True}},
            _i18n("Produce machine-readable JSON object on each line of output."),
        ),
    )

    jsonlist = Bool(
        False,
        config=True,
        help=_i18n(
            "If True, the output will be a JSON list of objects, one per "
            "active Jupyter server, each with the details from the "
            "relevant server info file."
        ),
    )
    json = Bool(
        False,
        config=True,
        help=_i18n(
            "If True, each line of output will be a JSON object with the "
            "details from the server info file.
For a JSON list output, " "see the JupyterServerListApp.jsonlist configuration value" ), ) def start(self): serverinfo_list = list(list_running_servers(self.runtime_dir, log=self.log)) if self.jsonlist: print(json.dumps(serverinfo_list, indent=2)) elif self.json: for serverinfo in serverinfo_list: print(json.dumps(serverinfo)) else: print("Currently running servers:") for serverinfo in serverinfo_list: url = serverinfo["url"] if serverinfo.get("token"): url = url + "?token=%s" % serverinfo["token"] print(url, "::", serverinfo["root_dir"]) # ----------------------------------------------------------------------------- # Aliases and Flags # ----------------------------------------------------------------------------- flags = dict(base_flags) flags["allow-root"] = ( {"ServerApp": {"allow_root": True}}, _i18n("Allow the server to be run from root user."), ) flags["no-browser"] = ( {"ServerApp": {"open_browser": False}, "ExtensionApp": {"open_browser": False}}, _i18n("Prevent the opening of the default url in the browser."), ) flags["debug"] = ( {"ServerApp": {"log_level": "DEBUG"}, "ExtensionApp": {"log_level": "DEBUG"}}, _i18n("Set debug level for the extension and underlying server applications."), ) flags["autoreload"] = ( {"ServerApp": {"autoreload": True}}, """Autoreload the webapp Enable reloading of the tornado webapp and all imported Python packages when any changes are made to any Python src files in server or extensions. """, ) # Add notebook manager flags flags.update( boolean_flag( "script", "FileContentsManager.save_script", "DEPRECATED, IGNORED", "DEPRECATED, IGNORED" ) ) aliases = dict(base_aliases) aliases.update( { "ip": "ServerApp.ip", "port": "ServerApp.port", "port-retries": "ServerApp.port_retries", "sock": "ServerApp.sock", "sock-mode": "ServerApp.sock_mode", "transport": "KernelManager.transport", "keyfile": "ServerApp.keyfile", "certfile": "ServerApp.certfile", "client-ca": "ServerApp.client_ca", "notebook-dir": "ServerApp.root_dir", "preferred-dir": "ServerApp.preferred_dir", "browser": "ServerApp.browser", "pylab": "ServerApp.pylab", "gateway-url": "GatewayClient.url", } ) # ----------------------------------------------------------------------------- # ServerApp # ----------------------------------------------------------------------------- class ServerApp(JupyterApp): name = "jupyter-server" version = __version__ description = _i18n( """The Jupyter Server. This launches a Tornado-based Jupyter Server.""" ) examples = _examples flags = Dict(flags) aliases = Dict(aliases) classes = [ KernelManager, Session, MappingKernelManager, KernelSpecManager, AsyncMappingKernelManager, ContentsManager, FileContentsManager, AsyncContentsManager, AsyncFileContentsManager, NotebookNotary, GatewayMappingKernelManager, GatewayKernelSpecManager, GatewaySessionManager, GatewayClient, ] if terminado_available: # Only necessary when terminado is available classes.append(TerminalManager) subcommands = dict( list=(JupyterServerListApp, JupyterServerListApp.description.splitlines()[0]), stop=(JupyterServerStopApp, JupyterServerStopApp.description.splitlines()[0]), password=(JupyterPasswordApp, JupyterPasswordApp.description.splitlines()[0]), extension=(ServerExtensionApp, ServerExtensionApp.description.splitlines()[0]), ) # A list of services whose handlers will be exposed. # Subclasses can override this list to # expose a subset of these handlers. 
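    # For example, a stripped-down subclass could expose only kernel-related
    # endpoints (a sketch; KernelOnlyServerApp is hypothetical):
    #
    #     class KernelOnlyServerApp(ServerApp):
    #         default_services = ("api", "auth", "kernels", "kernelspecs", "shutdown")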
default_services = ( "api", "auth", "config", "contents", "files", "kernels", "kernelspecs", "nbconvert", "security", "sessions", "shutdown", "view", ) _log_formatter_cls = LogFormatter @default("log_level") def _default_log_level(self): return logging.INFO @default("log_format") def _default_log_format(self): """override default log format to include date & time""" return u"%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s]%(end_color)s %(message)s" # file to be opened in the Jupyter server file_to_run = Unicode("", help="Open the named file when the application is launched.").tag( config=True ) file_url_prefix = Unicode( "notebooks", help="The URL prefix where files are opened directly." ).tag(config=True) # Network related information allow_origin = Unicode( "", config=True, help="""Set the Access-Control-Allow-Origin header Use '*' to allow any origin to access your server. Takes precedence over allow_origin_pat. """, ) allow_origin_pat = Unicode( "", config=True, help="""Use a regular expression for the Access-Control-Allow-Origin header Requests from an origin matching the expression will get replies with: Access-Control-Allow-Origin: origin where `origin` is the origin of the request. Ignored if allow_origin is set. """, ) allow_credentials = Bool( False, config=True, help=_i18n("Set the Access-Control-Allow-Credentials: true header") ) allow_root = Bool( False, config=True, help=_i18n("Whether to allow the user to run the server as root.") ) autoreload = Bool( False, config=True, help=_i18n("Reload the webapp when changes are made to any Python src files."), ) default_url = Unicode("/", config=True, help=_i18n("The default URL to redirect to from `/`")) ip = Unicode( "localhost", config=True, help=_i18n("The IP address the Jupyter server will listen on.") ) @default("ip") def _default_ip(self): """Return localhost if available, 127.0.0.1 otherwise. On some (horribly broken) systems, localhost cannot be bound. """ s = socket.socket() try: s.bind(("localhost", 0)) except socket.error as e: self.log.warning( _i18n("Cannot bind to localhost, using 127.0.0.1 as default ip\n%s"), e ) return "127.0.0.1" else: s.close() return "localhost" @validate("ip") def _validate_ip(self, proposal): value = proposal["value"] if value == u"*": value = u"" return value custom_display_url = Unicode( u"", config=True, help=_i18n( """Override URL shown to users. Replace actual URL, including protocol, address, port and base URL, with the given value when displaying URL to the users. Do not change the actual connection URL. If authentication token is enabled, the token is added to the custom URL automatically. This option is intended to be used when the URL to display to the user cannot be determined reliably by the Jupyter server (proxified or containerized setups for example).""" ), ) port_env = "JUPYTER_PORT" port_default_value = DEFAULT_JUPYTER_SERVER_PORT port = Integer( config=True, help=_i18n("The port the server will listen on (env: JUPYTER_PORT).") ) @default("port") def port_default(self): return int(os.getenv(self.port_env, self.port_default_value)) port_retries_env = "JUPYTER_PORT_RETRIES" port_retries_default_value = 50 port_retries = Integer( port_retries_default_value, config=True, help=_i18n( "The number of additional ports to try if the specified port is not " "available (env: JUPYTER_PORT_RETRIES)." 
), ) @default("port_retries") def port_retries_default(self): return int(os.getenv(self.port_retries_env, self.port_retries_default_value)) sock = Unicode(u"", config=True, help="The UNIX socket the Jupyter server will listen on.") sock_mode = Unicode( "0600", config=True, help="The permissions mode for UNIX socket creation (default: 0600)." ) @validate("sock_mode") def _validate_sock_mode(self, proposal): value = proposal["value"] try: converted_value = int(value.encode(), 8) assert all( ( # Ensure the mode is at least user readable/writable. bool(converted_value & stat.S_IRUSR), bool(converted_value & stat.S_IWUSR), # And isn't out of bounds. converted_value <= 2 ** 12, ) ) except ValueError: raise TraitError('invalid --sock-mode value: %s, please specify as e.g. "0600"' % value) except AssertionError: raise TraitError( "invalid --sock-mode value: %s, must have u+rw (0600) at a minimum" % value ) return value certfile = Unicode( u"", config=True, help=_i18n("""The full path to an SSL/TLS certificate file.""") ) keyfile = Unicode( u"", config=True, help=_i18n("""The full path to a private key file for usage with SSL/TLS."""), ) client_ca = Unicode( u"", config=True, help=_i18n( """The full path to a certificate authority certificate for SSL/TLS client authentication.""" ), ) cookie_secret_file = Unicode( config=True, help=_i18n("""The file where the cookie secret is stored.""") ) @default("cookie_secret_file") def _default_cookie_secret_file(self): return os.path.join(self.runtime_dir, "jupyter_cookie_secret") cookie_secret = Bytes( b"", config=True, help="""The random bytes used to secure cookies. By default this is a new random number every time you start the server. Set it to a value in a config file to enable logins to persist across server sessions. Note: Cookie secrets should be kept private, do not share config files with cookie_secret stored in plaintext (you can read the value from a file). """, ) @default("cookie_secret") def _default_cookie_secret(self): if os.path.exists(self.cookie_secret_file): with io.open(self.cookie_secret_file, "rb") as f: key = f.read() else: key = encodebytes(os.urandom(32)) self._write_cookie_secret_file(key) h = hmac.new(key, digestmod=hashlib.sha256) h.update(self.password.encode()) return h.digest() def _write_cookie_secret_file(self, secret): """write my secret to my secret_file""" self.log.info(_i18n("Writing Jupyter server cookie secret to %s"), self.cookie_secret_file) try: with secure_write(self.cookie_secret_file, True) as f: f.write(secret) except OSError as e: self.log.error( _i18n("Failed to write cookie secret to %s: %s"), self.cookie_secret_file, e ) token = Unicode( "", help=_i18n( """Token used for authenticating first-time connections to the server. The token can be read from the file referenced by JUPYTER_TOKEN_FILE or set directly with the JUPYTER_TOKEN environment variable. When no password is enabled, the default is to generate a new, random token. Setting to an empty string disables authentication altogether, which is NOT RECOMMENDED. 
""" ), ).tag(config=True) _token_generated = True @default("token") def _token_default(self): if os.getenv("JUPYTER_TOKEN"): self._token_generated = False return os.getenv("JUPYTER_TOKEN") if os.getenv("JUPYTER_TOKEN_FILE"): self._token_generated = False with io.open(os.getenv("JUPYTER_TOKEN_FILE"), "r") as token_file: return token_file.read() if self.password: # no token if password is enabled self._token_generated = False return u"" else: self._token_generated = True return binascii.hexlify(os.urandom(24)).decode("ascii") min_open_files_limit = Integer( config=True, help=""" Gets or sets a lower bound on the open file handles process resource limit. This may need to be increased if you run into an OSError: [Errno 24] Too many open files. This is not applicable when running on Windows. """, allow_none=True, ) @default("min_open_files_limit") def _default_min_open_files_limit(self): if resource is None: # Ignoring min_open_files_limit because the limit cannot be adjusted (for example, on Windows) return None soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE) DEFAULT_SOFT = 4096 if hard >= DEFAULT_SOFT: return DEFAULT_SOFT self.log.debug( "Default value for min_open_files_limit is ignored (hard=%r, soft=%r)", hard, soft ) return soft max_body_size = Integer( 512 * 1024 * 1024, config=True, help=""" Sets the maximum allowed size of the client request body, specified in the Content-Length request header field. If the size in a request exceeds the configured value, a malformed HTTP message is returned to the client. Note: max_body_size is applied even in streaming mode. """, ) max_buffer_size = Integer( 512 * 1024 * 1024, config=True, help=""" Gets or sets the maximum amount of memory, in bytes, that is allocated for use by the buffer manager. """, ) @observe("token") def _token_changed(self, change): self._token_generated = False password = Unicode( u"", config=True, help="""Hashed password to use for web authentication. To generate, type in a python/IPython shell: from jupyter_server.auth import passwd; passwd() The string should be of the form type:salt:hashed-password. """, ) password_required = Bool( False, config=True, help="""Forces users to use a password for the Jupyter server. This is useful in a multi user environment, for instance when everybody in the LAN can access each other's machine through ssh. In such a case, serving on localhost is not secure since any user can connect to the Jupyter server via ssh. """, ) allow_password_change = Bool( True, config=True, help="""Allow password to be changed at login for the Jupyter server. While logging in with a token, the Jupyter server UI will give the opportunity to the user to enter a new password at the same time that will replace the token login mechanism. This can be set to false to prevent changing password from the UI/API. """, ) disable_check_xsrf = Bool( False, config=True, help="""Disable cross-site-request-forgery protection Jupyter notebook 4.3.1 introduces protection from cross-site request forgeries, requiring API requests to either: - originate from pages served by this server (validated with XSRF cookie and token), or - authenticate with a token Some anonymous compute resources still desire the ability to run code, completely without authentication. These services can disable all authentication and security checks, with the full knowledge of what that implies. 
""", ) allow_remote_access = Bool( config=True, help="""Allow requests where the Host header doesn't point to a local server By default, requests get a 403 forbidden response if the 'Host' header shows that the browser thinks it's on a non-local domain. Setting this option to True disables this check. This protects against 'DNS rebinding' attacks, where a remote web server serves you a page and then changes its DNS to send later requests to a local IP, bypassing same-origin checks. Local IP addresses (such as 127.0.0.1 and ::1) are allowed as local, along with hostnames configured in local_hostnames. """, ) @default("allow_remote_access") def _default_allow_remote(self): """Disallow remote access if we're listening only on loopback addresses""" # if blank, self.ip was configured to "*" meaning bind to all interfaces, # see _valdate_ip if self.ip == "": return True try: addr = ipaddress.ip_address(self.ip) except ValueError: # Address is a hostname for info in socket.getaddrinfo(self.ip, self.port, 0, socket.SOCK_STREAM): addr = info[4][0] try: parsed = ipaddress.ip_address(addr.split("%")[0]) except ValueError: self.log.warning("Unrecognised IP address: %r", addr) continue # Macs map localhost to 'fe80::1%lo0', a link local address # scoped to the loopback interface. For now, we'll assume that # any scoped link-local address is effectively local. if not (parsed.is_loopback or (("%" in addr) and parsed.is_link_local)): return True return False else: return not addr.is_loopback use_redirect_file = Bool( True, config=True, help="""Disable launching browser by redirect file For versions of notebook > 5.7.2, a security feature measure was added that prevented the authentication token used to launch the browser from being visible. This feature makes it difficult for other users on a multi-user system from running code in your Jupyter session as you. However, some environments (like Windows Subsystem for Linux (WSL) and Chromebooks), launching a browser using a redirect file can lead the browser failing to load. This is because of the difference in file structures/paths between the runtime and the browser. Disabling this setting to False will disable this behavior, allowing the browser to launch by using a URL and visible token (as before). """, ) local_hostnames = List( Unicode(), ["localhost"], config=True, help="""Hostnames to allow as local when allow_remote_access is False. Local IP addresses (such as 127.0.0.1 and ::1) are automatically accepted as local as well. """, ) open_browser = Bool( False, config=True, help="""Whether to open in a browser after starting. The specific browser used is platform dependent and determined by the python standard library `webbrowser` module, unless it is overridden using the --browser (ServerApp.browser) configuration option. """, ) browser = Unicode( u"", config=True, help="""Specify what command to use to invoke a web browser when starting the server. If not specified, the default browser will be determined by the `webbrowser` standard library module, which allows setting of the BROWSER environment variable to override it. """, ) webbrowser_open_new = Integer( 2, config=True, help=_i18n( """Specify where to open the server on startup. This is the `new` argument passed to the standard library method `webbrowser.open`. The behaviour is not guaranteed, but depends on browser support. Valid values are: - 2 opens a new tab, - 1 opens a new window, - 0 opens in an existing window. See the `webbrowser.open` documentation for details. 
""" ), ) tornado_settings = Dict( config=True, help=_i18n( "Supply overrides for the tornado.web.Application that the " "Jupyter server uses." ), ) websocket_compression_options = Any( None, config=True, help=_i18n( """ Set the tornado compression options for websocket connections. This value will be returned from :meth:`WebSocketHandler.get_compression_options`. None (default) will disable compression. A dict (even an empty one) will enable compression. See the tornado docs for WebSocketHandler.get_compression_options for details. """ ), ) terminado_settings = Dict( config=True, help=_i18n('Supply overrides for terminado. Currently only supports "shell_command".'), ) cookie_options = Dict( config=True, help=_i18n( "Extra keyword arguments to pass to `set_secure_cookie`." " See tornado's set_secure_cookie docs for details." ), ) get_secure_cookie_kwargs = Dict( config=True, help=_i18n( "Extra keyword arguments to pass to `get_secure_cookie`." " See tornado's get_secure_cookie docs for details." ), ) ssl_options = Dict( allow_none=True, config=True, help=_i18n( """Supply SSL options for the tornado HTTPServer. See the tornado docs for details.""" ), ) jinja_environment_options = Dict( config=True, help=_i18n("Supply extra arguments that will be passed to Jinja environment.") ) jinja_template_vars = Dict( config=True, help=_i18n("Extra variables to supply to jinja templates when rendering."), ) base_url = Unicode( "/", config=True, help="""The base URL for the Jupyter server. Leading and trailing slashes can be omitted, and will automatically be added. """, ) @validate("base_url") def _update_base_url(self, proposal): value = proposal["value"] if not value.startswith("/"): value = "/" + value if not value.endswith("/"): value = value + "/" return value extra_static_paths = List( Unicode(), config=True, help="""Extra paths to search for serving static files. This allows adding javascript/css to be available from the Jupyter server machine, or overriding individual files in the IPython""", ) @property def static_file_path(self): """return extra paths + the default location""" return self.extra_static_paths + [DEFAULT_STATIC_FILES_PATH] static_custom_path = List(Unicode(), help=_i18n("""Path to search for custom.js, css""")) @default("static_custom_path") def _default_static_custom_path(self): return [os.path.join(d, "custom") for d in (self.config_dir, DEFAULT_STATIC_FILES_PATH)] extra_template_paths = List( Unicode(), config=True, help=_i18n( """Extra paths to search for serving jinja templates. Can be used to override templates from jupyter_server.templates.""" ), ) @property def template_file_path(self): """return extra paths + the default locations""" return self.extra_template_paths + DEFAULT_TEMPLATE_PATH_LIST extra_services = List( Unicode(), config=True, help=_i18n( """handlers that should be loaded at higher priority than the default services""" ), ) websocket_url = Unicode( "", config=True, help="""The base URL for websockets, if it differs from the HTTP server (hint: it almost certainly doesn't). Should be in the form of an HTTP origin: ws[s]://hostname[:port] """, ) quit_button = Bool( True, config=True, help="""If True, display controls to shut down the Jupyter server, such as menu items or buttons.""", ) # REMOVE in VERSION 2.0 # Temporarily allow content managers to inherit from the 'notebook' # package. We will deprecate this in the next major release. 
    contents_manager_class = TypeFromClasses(
        default_value=LargeFileManager,
        klasses=[
            "jupyter_server.services.contents.manager.ContentsManager",
            "notebook.services.contents.manager.ContentsManager",
        ],
        config=True,
        help=_i18n("The content manager class to use."),
    )

    # Throws a deprecation warning to notebook based contents managers.
    @observe("contents_manager_class")
    def _observe_contents_manager_class(self, change):
        new = change["new"]
        # If 'new' is a class, get a string representing the import
        # module path.
        if inspect.isclass(new):
            new = new.__module__

        if new.startswith("notebook"):
            self.log.warning(
                "The specified 'contents_manager_class' class inherits a manager from the "
                "'notebook' package. This is not guaranteed to work in future "
                "releases of Jupyter Server. Instead, consider switching the "
                "manager to inherit from the 'jupyter_server' managers. "
                "Jupyter Server will temporarily allow 'notebook' managers "
                "until its next major release (2.x)."
            )

    kernel_manager_class = Type(
        default_value=AsyncMappingKernelManager,
        klass=MappingKernelManager,
        config=True,
        help=_i18n("The kernel manager class to use."),
    )

    session_manager_class = Type(
        default_value=SessionManager,
        config=True,
        help=_i18n("The session manager class to use."),
    )

    config_manager_class = Type(
        default_value=ConfigManager,
        config=True,
        help=_i18n("The config manager class to use"),
    )

    kernel_spec_manager = Instance(KernelSpecManager, allow_none=True)

    kernel_spec_manager_class = Type(
        default_value=KernelSpecManager,
        config=True,
        help="""
        The kernel spec manager class to use. Should be a subclass
        of `jupyter_client.kernelspec.KernelSpecManager`.

        The API of KernelSpecManager is provisional and might change
        without warning between this version of Jupyter and the next stable one.
        """,
    )

    login_handler_class = Type(
        default_value=LoginHandler,
        klass=web.RequestHandler,
        config=True,
        help=_i18n("The login handler class to use."),
    )

    logout_handler_class = Type(
        default_value=LogoutHandler,
        klass=web.RequestHandler,
        config=True,
        help=_i18n("The logout handler class to use."),
    )

    trust_xheaders = Bool(
        False,
        config=True,
        help=(
            _i18n(
                "Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headers "
                "sent by the upstream reverse proxy. Necessary if the proxy handles SSL"
            )
        ),
    )

    info_file = Unicode()

    @default("info_file")
    def _default_info_file(self):
        info_file = "jpserver-%s.json" % os.getpid()
        return os.path.join(self.runtime_dir, info_file)

    browser_open_file = Unicode()

    @default("browser_open_file")
    def _default_browser_open_file(self):
        basename = "jpserver-%s-open.html" % os.getpid()
        return os.path.join(self.runtime_dir, basename)

    browser_open_file_to_run = Unicode()

    @default("browser_open_file_to_run")
    def _default_browser_open_file_to_run(self):
        basename = "jpserver-file-to-run-%s-open.html" % os.getpid()
        return os.path.join(self.runtime_dir, basename)

    pylab = Unicode(
        "disabled",
        config=True,
        help=_i18n(
            """
        DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib.
""" ), ) @observe("pylab") def _update_pylab(self, change): """when --pylab is specified, display a warning and exit""" if change["new"] != "warn": backend = " %s" % change["new"] else: backend = "" self.log.error( _i18n("Support for specifying --pylab on the command line has been removed.") ) self.log.error( _i18n("Please use `%pylab{0}` or `%matplotlib{0}` in the notebook itself.").format( backend ) ) self.exit(1) notebook_dir = Unicode(config=True, help=_i18n("DEPRECATED, use root_dir.")) @observe("notebook_dir") def _update_notebook_dir(self, change): if self._root_dir_set: # only use deprecated config if new config is not set return self.log.warning(_i18n("notebook_dir is deprecated, use root_dir")) self.root_dir = change["new"] root_dir = Unicode(config=True, help=_i18n("The directory to use for notebooks and kernels.")) _root_dir_set = False @default("root_dir") def _default_root_dir(self): if self.file_to_run: self._root_dir_set = True return os.path.dirname(os.path.abspath(self.file_to_run)) else: return os.getcwd() def _normalize_dir(self, value): # Strip any trailing slashes # *except* if it's root _, path = os.path.splitdrive(value) if path == os.sep: return value value = value.rstrip(os.sep) if not os.path.isabs(value): # If we receive a non-absolute path, make it absolute. value = os.path.abspath(value) return value @validate("root_dir") def _root_dir_validate(self, proposal): value = self._normalize_dir(proposal["value"]) if not os.path.isdir(value): raise TraitError(trans.gettext("No such directory: '%r'") % value) return value preferred_dir = Unicode( config=True, help=trans.gettext("Preferred starting directory to use for notebooks and kernels."), ) @default("preferred_dir") def _default_prefered_dir(self): return self.root_dir @validate("preferred_dir") def _preferred_dir_validate(self, proposal): value = self._normalize_dir(proposal["value"]) if not os.path.isdir(value): raise TraitError(trans.gettext("No such preferred dir: '%r'") % value) # preferred_dir must be equal or a subdir of root_dir if not value.startswith(self.root_dir): raise TraitError( trans.gettext("preferred_dir must be equal or a subdir of root_dir: '%r'") % value ) return value @observe("root_dir") def _root_dir_changed(self, change): self._root_dir_set = True if not self.preferred_dir.startswith(change["new"]): self.log.warning( trans.gettext("Value of preferred_dir updated to use value of root_dir") ) self.preferred_dir = change["new"] @observe("server_extensions") def _update_server_extensions(self, change): self.log.warning(_i18n("server_extensions is deprecated, use jpserver_extensions")) self.server_extensions = change["new"] jpserver_extensions = Dict( default_value={}, value_trait=Bool(), config=True, help=( _i18n( "Dict of Python modules to load as Jupyter server extensions." "Entry values can be used to enable and disable the loading of" "the extensions. The extensions will be loaded in alphabetical " "order." 
            )
        ),
    )

    reraise_server_extension_failures = Bool(
        False,
        config=True,
        help=_i18n("Reraise exceptions encountered loading server extensions?"),
    )

    iopub_msg_rate_limit = Float(
        1000,
        config=True,
        help=_i18n(
            """(msgs/sec)
        Maximum rate at which messages can be sent on iopub before they
        are limited."""
        ),
    )

    iopub_data_rate_limit = Float(
        1000000,
        config=True,
        help=_i18n(
            """(bytes/sec)
        Maximum rate at which stream output can be sent on iopub before they
        are limited."""
        ),
    )

    rate_limit_window = Float(
        3,
        config=True,
        help=_i18n(
            """(sec) Time window used to
        check the message and data rate limits."""
        ),
    )

    shutdown_no_activity_timeout = Integer(
        0,
        config=True,
        help=(
            "Shut down the server after N seconds with no kernels or "
            "terminals running and no activity. "
            "This can be used together with culling idle kernels "
            "(MappingKernelManager.cull_idle_timeout) to "
            "shutdown the Jupyter server when it's not in use. This is not "
            "precisely timed: it may shut down up to a minute later. "
            "0 (the default) disables this automatic shutdown."
        ),
    )

    terminals_enabled = Bool(
        True,
        config=True,
        help=_i18n(
            """Set to False to disable terminals.

         This does *not* make the server more secure by itself.
         Anything the user can do in a terminal, they can also do in a notebook.

         Terminals may also be automatically disabled if the terminado package
         is not available.
         """
        ),
    )

    # Since use of terminals is also a function of whether the terminado package is
    # available, this variable holds the "final indication" of whether terminal functionality
    # should be considered (particularly during shutdown/cleanup). It is enabled only
    # once both the terminals "service" can be initialized and terminals_enabled is True.
    # Note: this variable is slightly different from 'terminals_available' in the web settings
    # in that this variable *could* remain false if terminado is available, yet the terminal
    # service's initialization still fails. As a result, this variable holds the truth.
    terminals_available = False

    authenticate_prometheus = Bool(
        True,
        help="""
        Require authentication to access prometheus metrics.
        """,
        config=True,
    )

    _starter_app = Instance(
        default_value=None,
        allow_none=True,
        klass="jupyter_server.extension.application.ExtensionApp",
    )

    @property
    def starter_app(self):
        """Get the Extension that started this server."""
        return self._starter_app

    def parse_command_line(self, argv=None):
        super(ServerApp, self).parse_command_line(argv)

        if self.extra_args:
            arg0 = self.extra_args[0]
            f = os.path.abspath(arg0)
            self.argv.remove(arg0)
            if not os.path.exists(f):
                self.log.critical(_i18n("No such file or directory: %s"), f)
                self.exit(1)

            # Use config here, to ensure that it takes higher priority than
            # anything that comes from the config dirs.
            c = Config()
            if os.path.isdir(f):
                c.ServerApp.root_dir = f
            elif os.path.isfile(f):
                c.ServerApp.file_to_run = f
            self.update_config(c)

    def init_configurables(self):
        # If gateway server is configured, replace appropriate managers to perform redirection.
        # To make this determination, instantiate the GatewayClient config singleton.
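        # For example (a sketch; `my-gateway` is a placeholder host), pointing
        # kernels at a remote Gateway from the command line enables the swap
        # performed below, via the "gateway-url" alias defined above:
        #
        #     jupyter server --gateway-url=http://my-gateway:8888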
        self.gateway_config = GatewayClient.instance(parent=self)
        if self.gateway_config.gateway_enabled:
            self.kernel_manager_class = (
                "jupyter_server.gateway.managers.GatewayMappingKernelManager"
            )
            self.session_manager_class = "jupyter_server.gateway.managers.GatewaySessionManager"
            self.kernel_spec_manager_class = (
                "jupyter_server.gateway.managers.GatewayKernelSpecManager"
            )

        self.kernel_spec_manager = self.kernel_spec_manager_class(
            parent=self,
        )
        self.kernel_manager = self.kernel_manager_class(
            parent=self,
            log=self.log,
            connection_dir=self.runtime_dir,
            kernel_spec_manager=self.kernel_spec_manager,
        )
        self.contents_manager = self.contents_manager_class(
            parent=self,
            log=self.log,
        )
        self.session_manager = self.session_manager_class(
            parent=self,
            log=self.log,
            kernel_manager=self.kernel_manager,
            contents_manager=self.contents_manager,
        )
        self.config_manager = self.config_manager_class(
            parent=self,
            log=self.log,
        )

    def init_logging(self):
        # This prevents double log messages because tornado uses a root logger that
        # self.log is a child of. The logging module dispatches log messages to a
        # logger and all of its ancestors until propagate is set to False.
        self.log.propagate = False

        for log in app_log, access_log, gen_log:
            # consistent log output name (ServerApp instead of tornado.access, etc.)
            log.name = self.log.name
        # hook up tornado 3's loggers to our app handlers
        logger = logging.getLogger("tornado")
        logger.propagate = True
        logger.parent = self.log
        logger.setLevel(self.log.level)

    def init_webapp(self):
        """initialize tornado webapp"""
        self.tornado_settings["allow_origin"] = self.allow_origin
        self.tornado_settings["websocket_compression_options"] = self.websocket_compression_options
        if self.allow_origin_pat:
            self.tornado_settings["allow_origin_pat"] = re.compile(self.allow_origin_pat)
        self.tornado_settings["allow_credentials"] = self.allow_credentials
        self.tornado_settings["autoreload"] = self.autoreload
        self.tornado_settings["cookie_options"] = self.cookie_options
        self.tornado_settings["get_secure_cookie_kwargs"] = self.get_secure_cookie_kwargs
        self.tornado_settings["token"] = self.token

        # ensure default_url starts with base_url
        if not self.default_url.startswith(self.base_url):
            self.default_url = url_path_join(self.base_url, self.default_url)

        if self.password_required and (not self.password):
            self.log.critical(
                _i18n("Jupyter servers are configured to only be run with a password.")
            )
            self.log.critical(_i18n("Hint: run the following command to set a password"))
            self.log.critical(_i18n("\t$ python -m jupyter_server.auth password"))
            sys.exit(1)

        # Socket options validation.
        if self.sock:
            if self.port != DEFAULT_JUPYTER_SERVER_PORT:
                self.log.critical(
                    ("Options --port and --sock are mutually exclusive. Aborting."),
                )
                sys.exit(1)
            else:
                # Reset the default port if we're using a UNIX socket.
                self.port = 0

            if self.open_browser:
                # If we're bound to a UNIX socket, we can't reliably connect from a browser.
                self.log.info(
                    ("Ignoring --ServerApp.open_browser due to --sock being used."),
                )

            if self.file_to_run:
                self.log.critical(
                    ("Options --ServerApp.file_to_run and --sock are mutually exclusive."),
                )
                sys.exit(1)

            if sys.platform.startswith("win"):
                self.log.critical(
                    (
                        "Option --sock is not supported on Windows, but got value of %s. Aborting."
% self.sock ), ) sys.exit(1) self.web_app = ServerWebApplication( self, self.default_services, self.kernel_manager, self.contents_manager, self.session_manager, self.kernel_spec_manager, self.config_manager, self.extra_services, self.log, self.base_url, self.default_url, self.tornado_settings, self.jinja_environment_options, ) if self.certfile: self.ssl_options["certfile"] = self.certfile if self.keyfile: self.ssl_options["keyfile"] = self.keyfile if self.client_ca: self.ssl_options["ca_certs"] = self.client_ca if not self.ssl_options: # could be an empty dict or None # None indicates no SSL config self.ssl_options = None else: # SSL may be missing, so only import it if it's to be used import ssl # PROTOCOL_TLS selects the highest ssl/tls protocol version that both the client and # server support. When PROTOCOL_TLS is not available use PROTOCOL_SSLv23. self.ssl_options.setdefault( "ssl_version", getattr(ssl, "PROTOCOL_TLS", ssl.PROTOCOL_SSLv23) ) if self.ssl_options.get("ca_certs", False): self.ssl_options.setdefault("cert_reqs", ssl.CERT_REQUIRED) ssl_options = self.ssl_options self.login_handler_class.validate_security(self, ssl_options=self.ssl_options) def init_resources(self): """initialize system resources""" if resource is None: self.log.debug( "Ignoring min_open_files_limit because the limit cannot be adjusted (for example, on Windows)" ) return old_soft, old_hard = resource.getrlimit(resource.RLIMIT_NOFILE) soft = self.min_open_files_limit hard = old_hard if old_soft < soft: if hard < soft: hard = soft self.log.debug( "Raising open file limit: soft {}->{}; hard {}->{}".format( old_soft, soft, old_hard, hard ) ) resource.setrlimit(resource.RLIMIT_NOFILE, (soft, hard)) def _get_urlparts(self, path=None, include_token=False): """Constructs a urllib named tuple, ParseResult, with default values set by server config. The returned tuple can be manipulated using the `_replace` method. """ if self.sock: scheme = "http+unix" netloc = urlencode_unix_socket_path(self.sock) else: # Handle nonexplicit hostname. if self.ip in ("", "0.0.0.0"): ip = "%s" % socket.gethostname() else: ip = self.ip netloc = "{ip}:{port}".format(ip=ip, port=self.port) if self.certfile: scheme = "https" else: scheme = "http" if not path: path = self.default_url query = None if include_token: if self.token: # Don't log full token if it came from config token = self.token if self._token_generated else "..." query = urllib.parse.urlencode({"token": token}) # Build the URL Parts to dump. urlparts = urllib.parse.ParseResult( scheme=scheme, netloc=netloc, path=path, params=None, query=query, fragment=None ) return urlparts @property def public_url(self): parts = self._get_urlparts(include_token=True) # Update with custom pieces. if self.custom_display_url: # Parse custom display_url custom = urllib.parse.urlparse(self.custom_display_url)._asdict() # Get pieces that are matter (non None) custom_updates = {key: item for key, item in custom.items() if item} # Update public URL parts with custom pieces. parts = parts._replace(**custom_updates) return parts.geturl() @property def local_url(self): parts = self._get_urlparts(include_token=True) # Update with custom pieces. 
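        # Illustrative shapes only (hostname and token are placeholders): with
        # ip="" and port=8888, public_url uses the machine's hostname while
        # local_url pins 127.0.0.1, so display_url below renders roughly:
        #
        #     http://myhost:8888/?token=...
        #      or http://127.0.0.1:8888/?token=...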
if not self.sock: parts = parts._replace(netloc="127.0.0.1:{port}".format(port=self.port)) return parts.geturl() @property def display_url(self): """Human readable string with URLs for interacting with the running Jupyter Server """ url = self.public_url + "\n or " + self.local_url return url @property def connection_url(self): urlparts = self._get_urlparts(path=self.base_url) return urlparts.geturl() def init_terminals(self): if not self.terminals_enabled: return try: from jupyter_server.terminal import initialize initialize(self.web_app, self.root_dir, self.connection_url, self.terminado_settings) self.terminals_available = True except ImportError as e: self.log.warning(_i18n("Terminals not available (error was %s)"), e) def init_signal(self): if not sys.platform.startswith("win") and sys.stdin and sys.stdin.isatty(): signal.signal(signal.SIGINT, self._handle_sigint) signal.signal(signal.SIGTERM, self._signal_stop) if hasattr(signal, "SIGUSR1"): # Windows doesn't support SIGUSR1 signal.signal(signal.SIGUSR1, self._signal_info) if hasattr(signal, "SIGINFO"): # only on BSD-based systems signal.signal(signal.SIGINFO, self._signal_info) def _handle_sigint(self, sig, frame): """SIGINT handler spawns confirmation dialog""" # register more forceful signal handler for ^C^C case signal.signal(signal.SIGINT, self._signal_stop) # request confirmation dialog in bg thread, to avoid # blocking the App thread = threading.Thread(target=self._confirm_exit) thread.daemon = True thread.start() def _restore_sigint_handler(self): """callback for restoring original SIGINT handler""" signal.signal(signal.SIGINT, self._handle_sigint) def _confirm_exit(self): """confirm shutdown on ^C A second ^C, or answering 'y' within 5s will cause shutdown, otherwise original SIGINT handler will be restored. This doesn't work on Windows. """ info = self.log.info info(_i18n("interrupted")) # Check if answer_yes is set if self.answer_yes: self.log.critical(_i18n("Shutting down...")) # schedule stop on the main thread, # since this might be called from a signal handler self.stop(from_signal=True) return print(self.running_server_info()) yes = _i18n("y") no = _i18n("n") sys.stdout.write(_i18n("Shutdown this Jupyter server (%s/[%s])? ") % (yes, no)) sys.stdout.flush() r, w, x = select.select([sys.stdin], [], [], 5) if r: line = sys.stdin.readline() if line.lower().startswith(yes) and no not in line.lower(): self.log.critical(_i18n("Shutdown confirmed")) # schedule stop on the main thread, # since this might be called from a signal handler self.stop(from_signal=True) return else: print(_i18n("No answer for 5s:"), end=" ") print(_i18n("resuming operation...")) # no answer, or answer is no: # set it back to original SIGINT handler # use IOLoop.add_callback because signal.signal must be called # from main thread self.io_loop.add_callback_from_signal(self._restore_sigint_handler) def _signal_stop(self, sig, frame): self.log.critical(_i18n("received signal %s, stopping"), sig) self.stop(from_signal=True) def _signal_info(self, sig, frame): print(self.running_server_info()) def init_components(self): """Check the components submodule, and warn if it's unclean""" # TODO: this should still check, but now we use bower, not git submodule pass def find_server_extensions(self): """ Searches Jupyter paths for jpserver_extensions. """ # Walk through all config files looking for jpserver_extensions. # # Each extension will likely have a JSON config file enabling itself in # the "jupyter_server_config.d" directory. 
        # Find each of these and merge their results in order of precedence.
        #
        # Load server extensions with ConfigManager.
        # This enables merging on keys, which we want for extension enabling.
        # Regular config loading only merges at the class level,
        # so each level clobbers the previous.
        config_paths = jupyter_config_path()
        if self.config_dir not in config_paths:
            # add self.config_dir to the front, if set manually
            config_paths.insert(0, self.config_dir)
        manager = ExtensionConfigManager(read_config_path=config_paths)
        extensions = manager.get_jpserver_extensions()

        for modulename, enabled in sorted(extensions.items()):
            if modulename not in self.jpserver_extensions:
                self.config.ServerApp.jpserver_extensions.update({modulename: enabled})
                self.jpserver_extensions.update({modulename: enabled})

    def init_server_extensions(self):
        """
        If an extension's metadata includes an 'app' key,
        the value must be a subclass of ExtensionApp. An instance
        of the class will be created at this step. The config for
        this instance will inherit the ServerApp's config object
        and load its own config.
        """
        # Create an instance of the ExtensionManager.
        self.extension_manager = ExtensionManager(log=self.log, serverapp=self)
        self.extension_manager.from_jpserver_extensions(self.jpserver_extensions)
        self.extension_manager.link_all_extensions()

    def load_server_extensions(self):
        """Load any extensions specified by config.

        Import the module, then call the load_jupyter_server_extension function,
        if one exists.

        The extension API is experimental, and may change in future releases.
        """
        self.extension_manager.load_all_extensions()

    def init_mime_overrides(self):
        # On some Windows machines, an application has registered incorrect
        # mimetypes in the registry.
        # Tornado uses this when serving .css and .js files, causing browsers to
        # reject these files. We know the mimetype always needs to be text/css for css
        # and application/javascript for JS, so we override it here
        # and explicitly tell the mimetypes to not trust the Windows registry
        if os.name == "nt":
            # do not trust windows registry, which regularly has bad info
            mimetypes.init(files=[])
        # ensure css, js are correct, which are required for pages to function
        mimetypes.add_type("text/css", ".css")
        mimetypes.add_type("application/javascript", ".js")
        # for python <3.8
        mimetypes.add_type("application/wasm", ".wasm")

    def shutdown_no_activity(self):
        """Shutdown server on timeout when there are no kernels or terminals."""
        km = self.kernel_manager
        if len(km) != 0:
            return  # Kernels still running

        if self.terminals_available:
            term_mgr = self.web_app.settings["terminal_manager"]
            if term_mgr.terminals:
                return  # Terminals still running

        seconds_since_active = (utcnow() - self.web_app.last_activity()).total_seconds()
        self.log.debug("No activity for %d seconds.", seconds_since_active)
        if seconds_since_active > self.shutdown_no_activity_timeout:
            self.log.info(
                "No kernels or terminals for %d seconds; shutting down.", seconds_since_active
            )
            self.stop()

    def init_shutdown_no_activity(self):
        if self.shutdown_no_activity_timeout > 0:
            self.log.info(
                "Will shut down after %d seconds with no kernels or terminals.",
                self.shutdown_no_activity_timeout,
            )
            pc = ioloop.PeriodicCallback(self.shutdown_no_activity, 60000)
            pc.start()

    @property
    def http_server(self):
        """An instance of Tornado's HTTPServer class for the Server Web Application."""
        try:
            return self._http_server
        except AttributeError as e:
            raise AttributeError(
                "An HTTPServer instance has not been created for the "
                "Server Web Application. To create an HTTPServer for this "
                "application, call `.init_httpserver()`."
            ) from e

    def init_httpserver(self):
        """Creates an instance of a Tornado HTTPServer for the Server Web
        Application and sets the http_server attribute.
        """
        # Check that a web_app has been initialized before starting a server.
        if not hasattr(self, "web_app"):
            raise AttributeError(
                "A tornado web application has not been initialized. "
                "Try calling `.init_webapp()` first."
            )

        # Create an instance of the server.
        self._http_server = httpserver.HTTPServer(
            self.web_app,
            ssl_options=self.ssl_options,
            xheaders=self.trust_xheaders,
            max_body_size=self.max_body_size,
            max_buffer_size=self.max_buffer_size,
        )

        success = self._bind_http_server()
        if not success:
            self.log.critical(
                _i18n(
                    "ERROR: the Jupyter server could not be started because "
                    "no available port could be found."
                )
            )
            self.exit(1)

    def _bind_http_server(self):
        return self._bind_http_server_unix() if self.sock else self._bind_http_server_tcp()

    def _bind_http_server_unix(self):
        if unix_socket_in_use(self.sock):
            self.log.warning(_i18n("The socket %s is already in use.") % self.sock)
            return False

        try:
            sock = bind_unix_socket(self.sock, mode=int(self.sock_mode.encode(), 8))
            self.http_server.add_socket(sock)
        except socket.error as e:
            if e.errno == errno.EADDRINUSE:
                self.log.warning(_i18n("The socket %s is already in use.") % self.sock)
                return False
            elif e.errno in (errno.EACCES, getattr(errno, "WSAEACCES", errno.EACCES)):
                self.log.warning(_i18n("Permission to listen on sock %s denied") % self.sock)
                return False
            else:
                raise
        else:
            return True

    def _bind_http_server_tcp(self):
        success = None
        for port in random_ports(self.port, self.port_retries + 1):
            try:
                self.http_server.listen(port, self.ip)
            except socket.error as e:
                if e.errno == errno.EADDRINUSE:
                    if self.port_retries:
                        self.log.info(
                            _i18n("The port %i is already in use, trying another port.") % port
                        )
                    else:
                        self.log.info(_i18n("The port %i is already in use.") % port)
                    continue
                elif e.errno in (errno.EACCES, getattr(errno, "WSAEACCES", errno.EACCES)):
                    self.log.warning(_i18n("Permission to listen on port %i denied.") % port)
                    continue
                else:
                    raise
            else:
                self.port = port
                success = True
                break
        if not success:
            if self.port_retries:
                self.log.critical(
                    _i18n(
                        "ERROR: the Jupyter server could not be started because "
                        "no available port could be found."
                    )
                )
            else:
                self.log.critical(
                    _i18n(
                        "ERROR: the Jupyter server could not be started because "
                        "port %i is not available."
                    )
                    % port
                )
            self.exit(1)
        return success

    @staticmethod
    def _init_asyncio_patch():
        """set default asyncio policy to be compatible with tornado

        Tornado 6.0 is not compatible with default asyncio
        ProactorEventLoop, which lacks basic *_reader methods.
        Tornado 6.1 adds a workaround to add these methods in a thread,
        but SelectorEventLoop should still be preferred
        to avoid the extra thread for ~all of our events,
        at least until asyncio adds *_reader methods
        to proactor.
        """
        if sys.platform.startswith("win") and sys.version_info >= (3, 8):
            import asyncio

            try:
                from asyncio import (
                    WindowsProactorEventLoopPolicy,
                    WindowsSelectorEventLoopPolicy,
                )
            except ImportError:
                pass  # not affected
            else:
                if type(asyncio.get_event_loop_policy()) is WindowsProactorEventLoopPolicy:
                    # prefer Selector to Proactor for tornado + pyzmq
                    asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy())

    @catch_config_error
    def initialize(
        self, argv=None, find_extensions=True, new_httpserver=True, starter_extension=None
    ):
        """Initialize the Server application class, configurables, web application, and http server.

        Parameters
        ----------
        argv : list or None
            CLI arguments to parse.
        find_extensions : bool
            If True, find and load extensions listed in Jupyter config paths. If False,
            only load extensions that are passed to ServerApp directly through
            the `argv`, `config`, or `jpserver_extensions` arguments.
        new_httpserver : bool
            If True, a tornado HTTPServer instance will be created and configured for the Server Web
            Application. This will set the http_server attribute of this class.
        starter_extension : str
            If given, it references the name of an extension point that started the Server.
            We will try to load configuration from extension point.
        """
        self._init_asyncio_patch()
        # Parse command line, load ServerApp config files,
        # and update ServerApp config.
        super(ServerApp, self).initialize(argv=argv)
        if self._dispatching:
            return
        # Then, use extensions' config loading mechanism to
        # update config. ServerApp config takes precedence.
        if find_extensions:
            self.find_server_extensions()
        self.init_logging()
        self.init_server_extensions()
        # Special case the starter extension and load
        # any server configuration it provides.
        if starter_extension:
            # Configure ServerApp based on named extension.
            point = self.extension_manager.extension_points[starter_extension]
            # Set starter_app property.
            if point.app:
                self._starter_app = point.app
            # Load any configuration that comes from the Extension point.
            self.update_config(Config(point.config))

        # Initialize other pieces of the server.
        self.init_resources()
        self.init_configurables()
        self.init_components()
        self.init_webapp()
        self.init_terminals()
        self.init_signal()
        self.init_ioloop()
        self.load_server_extensions()
        self.init_mime_overrides()
        self.init_shutdown_no_activity()
        if new_httpserver:
            self.init_httpserver()

    async def cleanup_kernels(self):
        """Shutdown all kernels.

        The kernels will shutdown themselves when this process no longer exists,
        but explicit shutdown allows the KernelManagers to cleanup the connection files.
        """
        n_kernels = len(self.kernel_manager.list_kernel_ids())
        kernel_msg = trans.ngettext(
            "Shutting down %d kernel", "Shutting down %d kernels", n_kernels
        )
        self.log.info(kernel_msg % n_kernels)
        await run_sync_in_loop(self.kernel_manager.shutdown_all())

    async def cleanup_terminals(self):
        """Shutdown all terminals.

        The terminals will shutdown themselves when this process no longer exists,
        but explicit shutdown allows the TerminalManager to cleanup.
        """
        if not self.terminals_available:
            return

        terminal_manager = self.web_app.settings["terminal_manager"]
        n_terminals = len(terminal_manager.list())
        terminal_msg = trans.ngettext(
            "Shutting down %d terminal", "Shutting down %d terminals", n_terminals
        )
        self.log.info(terminal_msg % n_terminals)
        await run_sync_in_loop(terminal_manager.terminate_all())

    async def cleanup_extensions(self):
        """Call shutdown hooks in all extensions."""
        n_extensions = len(self.extension_manager.extension_apps)
        extension_msg = trans.ngettext(
            "Shutting down %d extension", "Shutting down %d extensions", n_extensions
        )
        self.log.info(extension_msg % n_extensions)
        await run_sync_in_loop(self.extension_manager.stop_all_extensions())

    def running_server_info(self, kernel_count=True):
        "Return the current working directory and the server url information"
        info = self.contents_manager.info_string() + "\n"
        if kernel_count:
            n_kernels = len(self.kernel_manager.list_kernel_ids())
            kernel_msg = trans.ngettext("%d active kernel", "%d active kernels", n_kernels)
            info += kernel_msg % n_kernels
            info += "\n"
        # Format the info so that the URL fits on a single line in 80 char display
        info += _i18n(
            "Jupyter Server {version} is running at:\n{url}".format(
                version=ServerApp.version, url=self.display_url
            )
        )
        if self.gateway_config.gateway_enabled:
            info += (
                _i18n("\nKernels will be managed by the Gateway server running at:\n%s")
                % self.gateway_config.url
            )
        return info

    def server_info(self):
        """Return a JSONable dict of information about this server."""
        return {
            "url": self.connection_url,
            "hostname": self.ip if self.ip else "localhost",
            "port": self.port,
            "sock": self.sock,
            "secure": bool(self.certfile),
            "base_url": self.base_url,
            "token": self.token,
            "root_dir": os.path.abspath(self.root_dir),
            "password": bool(self.password),
            "pid": os.getpid(),
            "version": ServerApp.version,
        }

    def write_server_info_file(self):
        """Write the result of server_info() to the JSON file info_file."""
        try:
            with secure_write(self.info_file) as f:
                json.dump(self.server_info(), f, indent=2, sort_keys=True)
        except OSError as e:
            self.log.error(_i18n("Failed to write server-info to %s: %s"), self.info_file, e)

    def remove_server_info_file(self):
        """Remove the jpserver-<pid>.json file created for this server.

        Ignores the error raised when the file has already been removed.
        """
        try:
            os.unlink(self.info_file)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

    def _resolve_file_to_run_and_root_dir(self):
        """Returns a relative path from file_to_run to root_dir.
        If root_dir and file_to_run are incompatible, i.e. on different
        subtrees, crash the app and log a critical message. Note that
        if root_dir is not configured and file_to_run is configured,
        root_dir will be set to the parent directory of file_to_run.
        """
        rootdir_abspath = pathlib.Path(self.root_dir).resolve()
        file_rawpath = pathlib.Path(self.file_to_run)
        combined_path = (rootdir_abspath / file_rawpath).resolve()
        is_child = str(combined_path).startswith(str(rootdir_abspath))

        if is_child:
            if combined_path.parent != rootdir_abspath:
                self.log.debug(
                    "The `root_dir` trait is set to a directory that's not "
                    "the immediate parent directory of `file_to_run`. Note that "
                    "the server will start at `root_dir` and open the "
                    "file from the relative path to the `root_dir`."
                )
            return str(combined_path.relative_to(rootdir_abspath))

        self.log.critical(
            "`root_dir` and `file_to_run` are incompatible. They "
            "don't share the same subtrees. Make sure `file_to_run` "
            "is on the same path as `root_dir`."
) self.exit(1) def _write_browser_open_file(self, url, fh): if self.token: url = url_concat(url, {"token": self.token}) url = url_path_join(self.connection_url, url) jinja2_env = self.web_app.settings["jinja2_env"] template = jinja2_env.get_template("browser-open.html") fh.write(template.render(open_url=url, base_url=self.base_url)) def write_browser_open_files(self): """Write the `browser_open_file` and `browser_open_file_to_run` files This can be used to open a file directly in a browser. """ # default_url contains base_url, but so does connection_url self.write_browser_open_file() # Create a second browser open file if # file_to_run is set. if self.file_to_run: # Make sure file_to_run and root_dir are compatible. file_to_run_relpath = self._resolve_file_to_run_and_root_dir() file_open_url = url_escape( url_path_join(self.file_url_prefix, *file_to_run_relpath.split(os.sep)) ) with open(self.browser_open_file_to_run, "w", encoding="utf-8") as f: self._write_browser_open_file(file_open_url, f) def write_browser_open_file(self): """Write a jpserver-<pid>-open.html file This can be used to open the notebook in a browser """ # default_url contains base_url, but so does connection_url open_url = self.default_url[len(self.base_url) :] with open(self.browser_open_file, "w", encoding="utf-8") as f: self._write_browser_open_file(open_url, f) def remove_browser_open_files(self): """Remove the `browser_open_file` and `browser_open_file_to_run` files created for this server. Ignores the error raised when the file has already been removed. """ self.remove_browser_open_file() try: os.unlink(self.browser_open_file_to_run) except OSError as e: if e.errno != errno.ENOENT: raise def remove_browser_open_file(self): """Remove the jpserver-<pid>-open.html file created for this server. Ignores the error raised when the file has already been removed. """ try: os.unlink(self.browser_open_file) except OSError as e: if e.errno != errno.ENOENT: raise def _prepare_browser_open(self): if not self.use_redirect_file: uri = self.default_url[len(self.base_url) :] if self.token: uri = url_concat(uri, {"token": self.token}) if self.file_to_run: # Create a separate, temporary open-browser-file # pointing at a specific file. open_file = self.browser_open_file_to_run else: # otherwise, just return the usual open browser file. open_file = self.browser_open_file if self.use_redirect_file: assembled_url = urljoin("file:", pathname2url(open_file)) else: assembled_url = url_path_join(self.connection_url, uri) return assembled_url, open_file def launch_browser(self): try: browser = webbrowser.get(self.browser or None) except webbrowser.Error as e: self.log.warning(_i18n("No web browser found: %s.") % e) browser = None if not browser: return assembled_url, _ = self._prepare_browser_open() b = lambda: browser.open(assembled_url, new=self.webbrowser_open_new) threading.Thread(target=b).start() def start_app(self): super(ServerApp, self).start() if not self.allow_root: # check if we are running as root, and abort if it's not allowed try: uid = os.geteuid() except AttributeError: uid = -1 # anything nonzero here, since we can't check UID assume non-root if uid == 0: self.log.critical( _i18n("Running as root is not recommended. Use --allow-root to bypass.") ) self.exit(1) info = self.log.info for line in self.running_server_info(kernel_count=False).split("\n"): info(line) info( _i18n( "Use Control-C to stop this server and shut down all kernels (twice to skip confirmation)." ) ) if "dev" in __version__: info( _i18n( "Welcome to Project Jupyter! 
Explore the various tools available" " and their corresponding documentation. If you are interested" " in contributing to the platform, please visit the community" " resources section at https://jupyter.org/community.html." ) ) self.write_server_info_file() self.write_browser_open_files() # Handle the browser opening. if self.open_browser and not self.sock: self.launch_browser() if self.token and self._token_generated: # log full URL with generated token, so there's a copy/pasteable link # with auth info. if self.sock: self.log.critical( "\n".join( [ "\n", "Jupyter Server is listening on %s" % self.display_url, "", ( "UNIX sockets are not browser-connectable, but you can tunnel to " "the instance via e.g. `ssh -L 8888:%s -N user@this_host` and then " "open e.g. %s in a browser." ) % (self.sock, self.connection_url), ] ) ) else: self.log.critical( "\n".join( [ "\n", "To access the server, open this file in a browser:", " %s" % urljoin("file:", pathname2url(self.browser_open_file)), "Or copy and paste one of these URLs:", " %s" % self.display_url, ] ) ) async def _cleanup(self): """General cleanup of files, extensions and kernels created by this instance of ServerApp. """ self.remove_server_info_file() self.remove_browser_open_files() await self.cleanup_extensions() await self.cleanup_kernels() await self.cleanup_terminals() def start_ioloop(self): """Start the IO Loop.""" if sys.platform.startswith("win"): # add no-op to wake every 5s # to handle signals that may be ignored by the inner loop pc = ioloop.PeriodicCallback(lambda: None, 5000) pc.start() try: self.io_loop.start() except KeyboardInterrupt: self.log.info(_i18n("Interrupted...")) def init_ioloop(self): """init self.io_loop so that an extension can use it with io_loop.call_later() to create background tasks""" self.io_loop = ioloop.IOLoop.current() def start(self): """Start the Jupyter server app, after initialization This method takes no arguments so all configuration and initialization must be done prior to calling this method.""" self.start_app() self.start_ioloop() async def _stop(self): """Cleanup resources and stop the IO Loop.""" await self._cleanup() self.io_loop.stop() def stop(self, from_signal=False): """Cleanup resources and stop the server.""" if hasattr(self, "_http_server"): # Stop the server if it's set. self.http_server.stop() if getattr(self, "io_loop", None): # use IOLoop.add_callback because signal.signal must be called # from main thread if from_signal: self.io_loop.add_callback_from_signal(self._stop) else: self.io_loop.add_callback(self._stop) def list_running_servers(runtime_dir=None, log=None): """Iterate over the server info files of running Jupyter servers. Given a runtime directory, find jpserver-* files in the security directory, and yield dicts of their information, each one pertaining to a currently running Jupyter server instance. 
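A hedged usage sketch, iterating the yielded info dicts (the keys match server_info() above):

    for info in list_running_servers():
        print(info["url"], "pid:", info["pid"])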
""" if runtime_dir is None: runtime_dir = jupyter_runtime_dir() # The runtime dir might not exist if not os.path.isdir(runtime_dir): return for file_name in os.listdir(runtime_dir): if re.match("jpserver-(.+).json", file_name): with io.open(os.path.join(runtime_dir, file_name), encoding="utf-8") as f: info = json.load(f) # Simple check whether that process is really still running # Also remove leftover files from IPython 2.x without a pid field if ("pid" in info) and check_pid(info["pid"]): yield info else: # If the process has died, try to delete its info file try: os.unlink(os.path.join(runtime_dir, file_name)) except OSError as e: if log: log.warning(_i18n("Deleting server info file failed: %s.") % e) # ----------------------------------------------------------------------------- # Main entry point # ----------------------------------------------------------------------------- main = launch_new_instance = ServerApp.launch_instance jupyter_server-1.13.1/jupyter_server/services/000077500000000000000000000000001415445537200216125ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/services/__init__.py000066400000000000000000000000001415445537200237110ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/services/api/000077500000000000000000000000001415445537200223635ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/services/api/__init__.py000066400000000000000000000000001415445537200244620ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/services/api/api.yaml000066400000000000000000000600551415445537200240260ustar00rootroot00000000000000swagger: "2.0" info: title: Jupyter Server API description: Server API version: "5" contact: name: Jupyter Project url: https://jupyter.org # will be prefixed to all paths basePath: / produces: - application/json consumes: - application/json parameters: kernel: name: kernel_id required: true in: path description: kernel uuid type: string format: uuid session: name: session required: true in: path description: session uuid type: string format: uuid path: name: path required: true in: path description: file path type: string checkpoint_id: name: checkpoint_id required: true in: path description: Checkpoint id for a file type: string section_name: name: section_name required: true in: path description: Name of config section type: string terminal_id: name: terminal_id required: true in: path description: ID of terminal session type: string paths: /api/contents/{path}: parameters: - $ref: "#/parameters/path" get: summary: Get contents of file or directory description: "A client can optionally specify a type and/or format argument via URL parameter. When given, the Contents service shall return a model in the requested type and/or format. If the request cannot be satisfied, e.g. type=text is requested, but the file is binary, then the request shall fail with 400 and have a JSON response containing a 'reason' field, with the value 'bad format' or 'bad type', depending on what was requested." 
tags: - contents parameters: - name: type in: query description: File type ('file', 'directory') type: string enum: - file - directory - name: format in: query description: "How file content should be returned ('text', 'base64')" type: string enum: - text - base64 - name: content in: query description: "Return content (0 for no content, 1 for return content)" type: integer responses: 404: description: No item found 400: description: Bad request schema: type: object properties: error: type: string description: Error condition reason: type: string description: Explanation of error reason 200: description: Contents of file or directory headers: Last-Modified: description: Last modified date for file type: string format: dateTime schema: $ref: "#/definitions/Contents" 500: description: Model key error post: summary: Create a new file in the specified path description: "A POST to /api/contents/path creates a new untitled, empty file or directory. A POST to /api/contents/path with body {'copy_from': '/path/to/OtherNotebook.ipynb'} creates a new copy of OtherNotebook in path." tags: - contents parameters: - name: model in: body description: Path of file to copy schema: type: object properties: copy_from: type: string ext: type: string type: type: string responses: 201: description: File created headers: Location: description: URL for the new file type: string format: url schema: $ref: "#/definitions/Contents" 404: description: No item found 400: description: Bad request schema: type: object properties: error: type: string description: Error condition reason: type: string description: Explanation of error reason patch: summary: Rename a file or directory without re-uploading content tags: - contents parameters: - name: path in: body required: true description: New path for file or directory. schema: type: object properties: path: type: string format: path description: New path for file or directory responses: 200: description: Path updated headers: Location: description: Updated URL for the file or directory type: string format: url schema: $ref: "#/definitions/Contents" 400: description: No data provided schema: type: object properties: error: type: string description: Error condition reason: type: string description: Explanation of error reason put: summary: Save or upload file. description: "Saves the file in the location specified by name and path. PUT is very similar to POST, but the requester specifies the name, whereas with POST, the server picks the name." 
tags: - contents parameters: - name: model in: body description: New path for file or directory schema: type: object properties: name: type: string description: The new filename if changed path: type: string description: New path for file or directory type: type: string description: Path type ('notebook', 'file', 'directory') format: type: string description: File format ('json', 'text', 'base64') content: type: string description: The actual body of the document excluding directory type responses: 200: description: File saved headers: Location: description: Updated URL for the file or directory type: string format: url schema: $ref: "#/definitions/Contents" 201: description: Path created headers: Location: description: URL for the file or directory type: string format: url schema: $ref: "#/definitions/Contents" 400: description: No data provided schema: type: object properties: error: type: string description: Error condition reason: type: string description: Explanation of error reason delete: summary: Delete a file in the given path tags: - contents responses: 204: description: File deleted headers: Location: description: URL for the removed file type: string format: url /api/contents/{path}/checkpoints: parameters: - $ref: "#/parameters/path" get: summary: Get a list of checkpoints for a file description: List checkpoints for a given file. There will typically be zero or one result. tags: - contents responses: 404: description: No item found 400: description: Bad request schema: type: object properties: error: type: string description: Error condition reason: type: string description: Explanation of error reason 200: description: List of checkpoints for a file schema: type: array items: $ref: "#/definitions/Checkpoints" 500: description: Model key error post: summary: Create a new checkpoint for a file description: "Create a new checkpoint with the current state of a file. With the default FileContentsManager, only one checkpoint is supported, so creating new checkpoints clobbers existing ones." tags: - contents responses: 201: description: Checkpoint created headers: Location: description: URL for the checkpoint type: string format: url schema: $ref: "#/definitions/Checkpoints" 404: description: No item found 400: description: Bad request schema: type: object properties: error: type: string description: Error condition reason: type: string description: Explanation of error reason /api/contents/{path}/checkpoints/{checkpoint_id}: post: summary: Restore a file to a particular checkpointed state parameters: - $ref: "#/parameters/path" - $ref: "#/parameters/checkpoint_id" tags: - contents responses: 204: description: Checkpoint restored 400: description: Bad request schema: type: object properties: error: type: string description: Error condition reason: type: string description: Explanation of error reason delete: summary: Delete a checkpoint parameters: - $ref: "#/parameters/path" - $ref: "#/parameters/checkpoint_id" tags: - contents responses: 204: description: Checkpoint deleted /api/sessions/{session}: parameters: - $ref: "#/parameters/session" get: summary: Get session tags: - sessions responses: 200: description: Session schema: $ref: "#/definitions/Session" patch: summary: "This can be used to rename the session." 
tags: - sessions parameters: - name: model in: body required: true schema: $ref: "#/definitions/Session" responses: 200: description: Session schema: $ref: "#/definitions/Session" 400: description: No data provided delete: summary: Delete a session tags: - sessions responses: 204: description: Session (and kernel) were deleted 410: description: "Kernel was deleted before the session, and the session was *not* deleted (TODO - check to make sure session wasn't deleted)" /api/sessions: get: summary: List available sessions tags: - sessions responses: 200: description: List of current sessions schema: type: array items: $ref: "#/definitions/Session" post: summary: "Create a new session, or return an existing session if a session of the same name already exists" tags: - sessions parameters: - name: session in: body schema: $ref: "#/definitions/Session" responses: 201: description: Session created or returned schema: $ref: "#/definitions/Session" headers: Location: description: URL for session commands type: string format: url 501: description: Session not available schema: type: object description: error message properties: message: type: string short_message: type: string /api/kernels: get: summary: List the JSON data for all kernels that are currently running tags: - kernels responses: 200: description: List of currently-running kernel uuids schema: type: array items: $ref: "#/definitions/Kernel" post: summary: Start a kernel and return the uuid tags: - kernels parameters: - name: options in: body schema: type: object required: - name properties: name: type: string description: Kernel spec name (defaults to default kernel spec for server) path: type: string description: API path from root to the cwd of the kernel responses: 201: description: Kernel started schema: $ref: "#/definitions/Kernel" headers: Location: description: Model for started kernel type: string format: url /api/kernels/{kernel_id}: parameters: - $ref: "#/parameters/kernel" get: summary: Get kernel information tags: - kernels responses: 200: description: Kernel information schema: $ref: "#/definitions/Kernel" delete: summary: Kill a kernel and delete the kernel id tags: - kernels responses: 204: description: Kernel deleted /api/kernels/{kernel_id}/interrupt: parameters: - $ref: "#/parameters/kernel" post: summary: Interrupt a kernel tags: - kernels responses: 204: description: Kernel interrupted /api/kernels/{kernel_id}/restart: parameters: - $ref: "#/parameters/kernel" post: summary: Restart a kernel tags: - kernels responses: 200: description: Kernel restarted headers: Location: description: URL for kernel commands type: string format: url schema: $ref: "#/definitions/Kernel" /api/kernelspecs: get: summary: Get kernel specs tags: - kernelspecs responses: 200: description: Kernel specs schema: type: object properties: default: type: string description: Default kernel name kernelspecs: type: object additionalProperties: $ref: "#/definitions/KernelSpec" /api/config/{section_name}: get: summary: Get a configuration section by name parameters: - $ref: "#/parameters/section_name" tags: - config responses: 200: description: Configuration object schema: type: object patch: summary: Update a configuration section by name tags: - config parameters: - $ref: "#/parameters/section_name" - name: configuration in: body schema: type: object responses: 200: description: Configuration object schema: type: object /api/terminals: get: summary: Get available terminals tags: - terminals responses: 200: description: A list of all available 
terminal ids. schema: type: array items: $ref: "#/definitions/Terminal" 403: description: Forbidden to access 404: description: Not found post: summary: Create a new terminal tags: - terminals responses: 200: description: Successfully created a new terminal schema: $ref: "#/definitions/Terminal" 403: description: Forbidden to access 404: description: Not found /api/terminals/{terminal_id}: get: summary: Get a terminal session corresponding to an id. tags: - terminals parameters: - $ref: "#/parameters/terminal_id" responses: 200: description: Terminal session with given id schema: $ref: "#/definitions/Terminal" 403: description: Forbidden to access 404: description: Not found delete: summary: Delete a terminal session corresponding to an id. tags: - terminals parameters: - $ref: "#/parameters/terminal_id" responses: 204: description: Successfully deleted terminal session 403: description: Forbidden to access 404: description: Not found /api/status: get: summary: Get the current status/activity of the server. tags: - status responses: 200: description: The current status of the server schema: $ref: "#/definitions/APIStatus" /api/spec.yaml: get: summary: Get the current spec for the notebook server's APIs. tags: - api-spec produces: - text/x-yaml responses: 200: description: The current spec for the notebook server's APIs. schema: type: file definitions: APIStatus: description: | Notebook server API status. Added in notebook 5.0. properties: started: type: string description: | ISO8601 timestamp indicating when the notebook server started. last_activity: type: string description: | ISO8601 timestamp indicating the last activity on the server, either on the REST API or kernel activity. connections: type: number description: | The total number of currently open connections to kernels. kernels: type: number description: | The total number of running kernels. KernelSpec: description: Kernel spec (contents of kernel.json) properties: name: type: string description: Unique name for kernel KernelSpecFile: $ref: "#/definitions/KernelSpecFile" resources: type: object properties: kernel.js: type: string format: filename description: path for kernel.js file kernel.css: type: string format: filename description: path for kernel.css file logo-*: type: string format: filename description: path for logo file. Logo filenames are of the form `logo-widthxheight` KernelSpecFile: description: Kernel spec json file required: - argv - display_name - language properties: language: type: string description: The programming language that this kernel runs. This will be stored in notebook metadata. argv: type: array description: "A list of command line arguments used to start the kernel. The text `{connection_file}` in any argument will be replaced with the path to the connection file." items: type: string display_name: type: string description: "The kernel's name as it should be displayed in the UI. Unlike the kernel name used in the API, this can contain arbitrary unicode characters." codemirror_mode: type: string description: Codemirror mode. Can be a string *or* a valid Codemirror mode object. This defaults to the string from the `language` property. env: type: object description: A dictionary of environment variables to set for the kernel. These will be added to the current environment variables. additionalProperties: type: string help_links: type: array description: Help items to be displayed in the help menu in the notebook UI. 
items: type: object required: - text - url properties: text: type: string description: menu item link text url: type: string format: URL description: menu item link url Kernel: description: Kernel information required: - id - name properties: id: type: string format: uuid description: uuid of kernel name: type: string description: kernel spec name last_activity: type: string description: | ISO 8601 timestamp for the last-seen activity on this kernel. Use this in combination with execution_state == 'idle' to identify which kernels have been idle since a given time. Timestamps will be UTC, indicated by the 'Z' suffix. Added in notebook server 5.0. connections: type: number description: | The number of active connections to this kernel. execution_state: type: string description: | Current execution state of the kernel (typically 'idle' or 'busy', but may be other values, such as 'starting'). Added in notebook server 5.0. Session: description: A session type: object properties: id: type: string format: uuid path: type: string description: path to the session name: type: string description: name of the session type: type: string description: session type kernel: $ref: "#/definitions/Kernel" Contents: description: "A contents object. The content and format keys may be null if content is not contained. If type is 'file', then the mimetype will be null." type: object required: - type - name - path - writable - created - last_modified - mimetype - format - content properties: name: type: string description: "Name of file or directory, equivalent to the last part of the path" path: type: string description: Full path for file or directory type: type: string description: Type of content enum: - directory - file - notebook writable: type: boolean description: indicates whether the requester has permission to edit the file created: type: string description: Creation timestamp format: dateTime last_modified: type: string description: Last modified timestamp format: dateTime size: type: integer description: "The size of the file or notebook in bytes. If no size is provided, defaults to null." mimetype: type: string description: "The mimetype of a file. If content is not null, and type is 'file', this will contain the mimetype of the file, otherwise this will be null." content: type: string description: "The content, if requested (otherwise null). Will be an array if type is 'directory'" format: type: string description: Format of content (one of null, 'text', 'base64', 'json') Checkpoints: description: A checkpoint object. type: object required: - id - last_modified properties: id: type: string description: Unique id for the checkpoint. last_modified: type: string description: Last modified timestamp format: dateTime Terminal: description: A Terminal object type: object required: - name properties: name: type: string description: name of terminal last_activity: type: string description: | ISO 8601 timestamp for the last-seen activity on this terminal. Use this to identify which terminals have been inactive since a given time. Timestamps will be UTC, indicated by the 'Z' suffix. jupyter_server-1.13.1/jupyter_server/services/api/handlers.py000066400000000000000000000031561415445537200245420ustar00rootroot00000000000000"""Tornado handlers for api specifications.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
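# Hedged usage sketch (illustrative only; not part of the original module):
# how a client might call the status endpoint served by the handlers below.
# Uses only the standard library; the base URL and token are assumptions to
# adjust for your own server.
def _example_fetch_api_status(base_url="http://localhost:8888", token=""):
    import json
    from urllib.request import Request, urlopen

    # Jupyter servers accept token auth via the Authorization header.
    req = Request(
        base_url + "/api/status",
        headers={"Authorization": "token %s" % token},
    )
    with urlopen(req) as resp:
        # Returns the JSON model written by APIStatusHandler below.
        return json.loads(resp.read().decode("utf-8"))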
import json import os from tornado import web from ...base.handlers import APIHandler from ...base.handlers import JupyterHandler from jupyter_server._tz import isoformat from jupyter_server._tz import utcfromtimestamp from jupyter_server.utils import ensure_async class APISpecHandler(web.StaticFileHandler, JupyterHandler): def initialize(self): web.StaticFileHandler.initialize(self, path=os.path.dirname(__file__)) @web.authenticated def get(self): self.log.warning("Serving api spec (experimental, incomplete)") return web.StaticFileHandler.get(self, "api.yaml") def get_content_type(self): return "text/x-yaml" class APIStatusHandler(APIHandler): _track_activity = False @web.authenticated async def get(self): # if started was missing, use unix epoch started = self.settings.get("started", utcfromtimestamp(0)) started = isoformat(started) kernels = await ensure_async(self.kernel_manager.list_kernels()) total_connections = sum(k["connections"] for k in kernels) last_activity = isoformat(self.application.last_activity()) model = { "started": started, "last_activity": last_activity, "kernels": len(kernels), "connections": total_connections, } self.finish(json.dumps(model, sort_keys=True)) default_handlers = [ (r"/api/spec.yaml", APISpecHandler), (r"/api/status", APIStatusHandler), ] jupyter_server-1.13.1/jupyter_server/services/config/000077500000000000000000000000001415445537200230575ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/services/config/__init__.py000066400000000000000000000000431415445537200251650ustar00rootroot00000000000000from .manager import ConfigManager jupyter_server-1.13.1/jupyter_server/services/config/handlers.py000066400000000000000000000020231415445537200252300ustar00rootroot00000000000000"""Tornado handlers for frontend config storage.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import json from tornado import web from ...base.handlers import APIHandler class ConfigHandler(APIHandler): @web.authenticated def get(self, section_name): self.set_header("Content-Type", "application/json") self.finish(json.dumps(self.config_manager.get(section_name))) @web.authenticated def put(self, section_name): data = self.get_json_body() # Will raise 400 if content is not valid JSON self.config_manager.set(section_name, data) self.set_status(204) @web.authenticated def patch(self, section_name): new_data = self.get_json_body() section = self.config_manager.update(section_name, new_data) self.finish(json.dumps(section)) # URL to handler mappings section_name_regex = r"(?P<section_name>\w+)" default_handlers = [ (r"/api/config/%s" % section_name_regex, ConfigHandler), ] jupyter_server-1.13.1/jupyter_server/services/config/manager.py000066400000000000000000000044541415445537200250520ustar00rootroot00000000000000"""Manager to read and modify frontend config data in JSON files. """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
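# Hedged usage sketch (illustrative only; not part of the original module):
# writing a value to the user's frontend config and reading the merged view
# back through the ConfigManager defined below. The section name "notebook"
# and the nested keys are illustrative assumptions.
def _example_config_roundtrip():
    from jupyter_server.services.config import ConfigManager

    cm = ConfigManager()
    # update() merges into the user-level JSON file for this section...
    cm.update("notebook", {"load_extensions": {"myext": True}})
    # ...while get() merges every directory on the read path, with the
    # front of the list taking priority.
    return cm.get("notebook")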
import os.path from jupyter_core.paths import jupyter_config_dir from jupyter_core.paths import jupyter_config_path from traitlets import default from traitlets import Instance from traitlets import List from traitlets import observe from traitlets import Unicode from traitlets.config import LoggingConfigurable from jupyter_server.config_manager import BaseJSONConfigManager from jupyter_server.config_manager import recursive_update class ConfigManager(LoggingConfigurable): """Config Manager used for storing frontend config""" config_dir_name = Unicode("serverconfig", help="""Name of the config directory.""").tag( config=True ) # Public API def get(self, section_name): """Get the config from all config sections.""" config = {} # step through back to front, to ensure front of the list is top priority for p in self.read_config_path[::-1]: cm = BaseJSONConfigManager(config_dir=p) recursive_update(config, cm.get(section_name)) return config def set(self, section_name, data): """Set the config only to the user's config.""" return self.write_config_manager.set(section_name, data) def update(self, section_name, new_data): """Update the config only to the user's config.""" return self.write_config_manager.update(section_name, new_data) # Private API read_config_path = List(Unicode()) @default("read_config_path") def _default_read_config_path(self): return [os.path.join(p, self.config_dir_name) for p in jupyter_config_path()] write_config_dir = Unicode() @default("write_config_dir") def _default_write_config_dir(self): return os.path.join(jupyter_config_dir(), self.config_dir_name) write_config_manager = Instance(BaseJSONConfigManager) @default("write_config_manager") def _default_write_config_manager(self): return BaseJSONConfigManager(config_dir=self.write_config_dir) @observe("write_config_dir") def _update_write_config_dir(self, change): self.write_config_manager = BaseJSONConfigManager(config_dir=self.write_config_dir) jupyter_server-1.13.1/jupyter_server/services/contents/000077500000000000000000000000001415445537200234475ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/services/contents/__init__.py000066400000000000000000000000001415445537200255460ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/services/contents/checkpoints.py000066400000000000000000000217431415445537200263420ustar00rootroot00000000000000""" Classes for managing Checkpoints. """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. from tornado.web import HTTPError from traitlets.config.configurable import LoggingConfigurable class Checkpoints(LoggingConfigurable): """ Base class for managing checkpoints for a ContentsManager. 
Subclasses are required to implement: create_checkpoint(self, contents_mgr, path) restore_checkpoint(self, contents_mgr, checkpoint_id, path) rename_checkpoint(self, checkpoint_id, old_path, new_path) delete_checkpoint(self, checkpoint_id, path) list_checkpoints(self, path) """ def create_checkpoint(self, contents_mgr, path): """Create a checkpoint.""" raise NotImplementedError("must be implemented in a subclass") def restore_checkpoint(self, contents_mgr, checkpoint_id, path): """Restore a checkpoint""" raise NotImplementedError("must be implemented in a subclass") def rename_checkpoint(self, checkpoint_id, old_path, new_path): """Rename a single checkpoint from old_path to new_path.""" raise NotImplementedError("must be implemented in a subclass") def delete_checkpoint(self, checkpoint_id, path): """delete a checkpoint for a file""" raise NotImplementedError("must be implemented in a subclass") def list_checkpoints(self, path): """Return a list of checkpoints for a given file""" raise NotImplementedError("must be implemented in a subclass") def rename_all_checkpoints(self, old_path, new_path): """Rename all checkpoints for old_path to new_path.""" for cp in self.list_checkpoints(old_path): self.rename_checkpoint(cp["id"], old_path, new_path) def delete_all_checkpoints(self, path): """Delete all checkpoints for the given path.""" for checkpoint in self.list_checkpoints(path): self.delete_checkpoint(checkpoint["id"], path) class GenericCheckpointsMixin(object): """ Helper for creating Checkpoints subclasses that can be used with any ContentsManager. Provides a ContentsManager-agnostic implementation of `create_checkpoint` and `restore_checkpoint` in terms of the following operations: - create_file_checkpoint(self, content, format, path) - create_notebook_checkpoint(self, nb, path) - get_file_checkpoint(self, checkpoint_id, path) - get_notebook_checkpoint(self, checkpoint_id, path) To create a generic CheckpointManager, add this mixin to a class that implements the above four methods plus the remaining Checkpoints API methods: - delete_checkpoint(self, checkpoint_id, path) - list_checkpoints(self, path) - rename_checkpoint(self, checkpoint_id, old_path, new_path) """ def create_checkpoint(self, contents_mgr, path): model = contents_mgr.get(path, content=True) type = model["type"] if type == "notebook": return self.create_notebook_checkpoint( model["content"], path, ) elif type == "file": return self.create_file_checkpoint( model["content"], model["format"], path, ) else: raise HTTPError(500, u"Unexpected type %s" % type) def restore_checkpoint(self, contents_mgr, checkpoint_id, path): """Restore a checkpoint.""" type = contents_mgr.get(path, content=False)["type"] if type == "notebook": model = self.get_notebook_checkpoint(checkpoint_id, path) elif type == "file": model = self.get_file_checkpoint(checkpoint_id, path) else: raise HTTPError(500, u"Unexpected type %s" % type) contents_mgr.save(model, path) # Required Methods def create_file_checkpoint(self, content, format, path): """Create a checkpoint of the current state of a file Returns a checkpoint model for the new checkpoint. """ raise NotImplementedError("must be implemented in a subclass") def create_notebook_checkpoint(self, nb, path): """Create a checkpoint of the current state of a notebook Returns a checkpoint model for the new checkpoint. """ raise NotImplementedError("must be implemented in a subclass") def get_file_checkpoint(self, checkpoint_id, path): """Get the content of a checkpoint for a non-notebook file. 
Returns a dict of the form: { 'type': 'file', 'content': <str>, 'format': {'text','base64'}, } """ raise NotImplementedError("must be implemented in a subclass") def get_notebook_checkpoint(self, checkpoint_id, path): """Get the content of a checkpoint for a notebook. Returns a dict of the form: { 'type': 'notebook', 'content': <output of nbformat.read>, } """ raise NotImplementedError("must be implemented in a subclass") class AsyncCheckpoints(Checkpoints): """ Base class for managing checkpoints for a ContentsManager asynchronously. """ async def create_checkpoint(self, contents_mgr, path): """Create a checkpoint.""" raise NotImplementedError("must be implemented in a subclass") async def restore_checkpoint(self, contents_mgr, checkpoint_id, path): """Restore a checkpoint""" raise NotImplementedError("must be implemented in a subclass") async def rename_checkpoint(self, checkpoint_id, old_path, new_path): """Rename a single checkpoint from old_path to new_path.""" raise NotImplementedError("must be implemented in a subclass") async def delete_checkpoint(self, checkpoint_id, path): """delete a checkpoint for a file""" raise NotImplementedError("must be implemented in a subclass") async def list_checkpoints(self, path): """Return a list of checkpoints for a given file""" raise NotImplementedError("must be implemented in a subclass") async def rename_all_checkpoints(self, old_path, new_path): """Rename all checkpoints for old_path to new_path.""" for cp in await self.list_checkpoints(old_path): await self.rename_checkpoint(cp["id"], old_path, new_path) async def delete_all_checkpoints(self, path): """Delete all checkpoints for the given path.""" for checkpoint in await self.list_checkpoints(path): await self.delete_checkpoint(checkpoint["id"], path) class AsyncGenericCheckpointsMixin(GenericCheckpointsMixin): """ Helper for creating Asynchronous Checkpoints subclasses that can be used with any ContentsManager. """ async def create_checkpoint(self, contents_mgr, path): model = await contents_mgr.get(path, content=True) type = model["type"] if type == "notebook": return await self.create_notebook_checkpoint( model["content"], path, ) elif type == "file": return await self.create_file_checkpoint( model["content"], model["format"], path, ) else: raise HTTPError(500, u"Unexpected type %s" % type) async def restore_checkpoint(self, contents_mgr, checkpoint_id, path): """Restore a checkpoint.""" type = (await contents_mgr.get(path, content=False))["type"] if type == "notebook": model = await self.get_notebook_checkpoint(checkpoint_id, path) elif type == "file": model = await self.get_file_checkpoint(checkpoint_id, path) else: raise HTTPError(500, u"Unexpected type %s" % type) await contents_mgr.save(model, path) # Required Methods async def create_file_checkpoint(self, content, format, path): """Create a checkpoint of the current state of a file Returns a checkpoint model for the new checkpoint. """ raise NotImplementedError("must be implemented in a subclass") async def create_notebook_checkpoint(self, nb, path): """Create a checkpoint of the current state of a notebook Returns a checkpoint model for the new checkpoint. """ raise NotImplementedError("must be implemented in a subclass") async def get_file_checkpoint(self, checkpoint_id, path): """Get the content of a checkpoint for a non-notebook file. 
Returns a dict of the form: { 'type': 'file', 'content': <str>, 'format': {'text','base64'}, } """ raise NotImplementedError("must be implemented in a subclass") async def get_notebook_checkpoint(self, checkpoint_id, path): """Get the content of a checkpoint for a notebook. Returns a dict of the form: { 'type': 'notebook', 'content': <output of nbformat.read>, } """ raise NotImplementedError("must be implemented in a subclass") jupyter_server-1.13.1/jupyter_server/services/contents/filecheckpoints.py000066400000000000000000000276561415445537200272110ustar00rootroot00000000000000""" File-based Checkpoints implementations. """ import os import shutil from anyio.to_thread import run_sync from jupyter_core.utils import ensure_dir_exists from tornado.web import HTTPError from traitlets import Unicode from .checkpoints import AsyncCheckpoints from .checkpoints import AsyncGenericCheckpointsMixin from .checkpoints import Checkpoints from .checkpoints import GenericCheckpointsMixin from .fileio import AsyncFileManagerMixin from .fileio import FileManagerMixin from jupyter_server import _tz as tz class FileCheckpoints(FileManagerMixin, Checkpoints): """ A Checkpoints that caches checkpoints for files in adjacent directories. Only works with FileContentsManager. Use GenericFileCheckpoints if you want file-based checkpoints with another ContentsManager. """ checkpoint_dir = Unicode( ".ipynb_checkpoints", config=True, help="""The directory name in which to keep file checkpoints This is a path relative to the file's own directory. By default, it is .ipynb_checkpoints """, ) root_dir = Unicode(config=True) def _root_dir_default(self): try: return self.parent.root_dir except AttributeError: return os.getcwd() # ContentsManager-dependent checkpoint API def create_checkpoint(self, contents_mgr, path): """Create a checkpoint.""" checkpoint_id = u"checkpoint" src_path = contents_mgr._get_os_path(path) dest_path = self.checkpoint_path(checkpoint_id, path) self._copy(src_path, dest_path) return self.checkpoint_model(checkpoint_id, dest_path) def restore_checkpoint(self, contents_mgr, checkpoint_id, path): """Restore a checkpoint.""" src_path = self.checkpoint_path(checkpoint_id, path) dest_path = contents_mgr._get_os_path(path) self._copy(src_path, dest_path) # ContentsManager-independent checkpoint API def rename_checkpoint(self, checkpoint_id, old_path, new_path): """Rename a checkpoint from old_path to new_path.""" old_cp_path = self.checkpoint_path(checkpoint_id, old_path) new_cp_path = self.checkpoint_path(checkpoint_id, new_path) if os.path.isfile(old_cp_path): self.log.debug( "Renaming checkpoint %s -> %s", old_cp_path, new_cp_path, ) with self.perm_to_403(): shutil.move(old_cp_path, new_cp_path) def delete_checkpoint(self, checkpoint_id, path): """delete a file's checkpoint""" path = path.strip("/") cp_path = self.checkpoint_path(checkpoint_id, path) if not os.path.isfile(cp_path): self.no_such_checkpoint(path, checkpoint_id) self.log.debug("unlinking %s", cp_path) with self.perm_to_403(): os.unlink(cp_path) def list_checkpoints(self, path): """list the checkpoints for a given file This contents manager currently only supports one checkpoint per file. 
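A sketch of the returned value (this manager keeps at most one checkpoint, with the fixed id "checkpoint"):

    [{"id": "checkpoint", "last_modified": <UTC datetime>}]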
""" path = path.strip("/") checkpoint_id = "checkpoint" os_path = self.checkpoint_path(checkpoint_id, path) if not os.path.isfile(os_path): return [] else: return [self.checkpoint_model(checkpoint_id, os_path)] # Checkpoint-related utilities def checkpoint_path(self, checkpoint_id, path): """find the path to a checkpoint""" path = path.strip("/") parent, name = ("/" + path).rsplit("/", 1) parent = parent.strip("/") basename, ext = os.path.splitext(name) filename = u"{name}-{checkpoint_id}{ext}".format( name=basename, checkpoint_id=checkpoint_id, ext=ext, ) os_path = self._get_os_path(path=parent) cp_dir = os.path.join(os_path, self.checkpoint_dir) with self.perm_to_403(): ensure_dir_exists(cp_dir) cp_path = os.path.join(cp_dir, filename) return cp_path def checkpoint_model(self, checkpoint_id, os_path): """construct the info dict for a given checkpoint""" stats = os.stat(os_path) last_modified = tz.utcfromtimestamp(stats.st_mtime) info = dict( id=checkpoint_id, last_modified=last_modified, ) return info # Error Handling def no_such_checkpoint(self, path, checkpoint_id): raise HTTPError(404, u"Checkpoint does not exist: %s@%s" % (path, checkpoint_id)) class AsyncFileCheckpoints(FileCheckpoints, AsyncFileManagerMixin, AsyncCheckpoints): async def create_checkpoint(self, contents_mgr, path): """Create a checkpoint.""" checkpoint_id = u"checkpoint" src_path = contents_mgr._get_os_path(path) dest_path = self.checkpoint_path(checkpoint_id, path) await self._copy(src_path, dest_path) return await self.checkpoint_model(checkpoint_id, dest_path) async def restore_checkpoint(self, contents_mgr, checkpoint_id, path): """Restore a checkpoint.""" src_path = self.checkpoint_path(checkpoint_id, path) dest_path = contents_mgr._get_os_path(path) await self._copy(src_path, dest_path) async def checkpoint_model(self, checkpoint_id, os_path): """construct the info dict for a given checkpoint""" stats = await run_sync(os.stat, os_path) last_modified = tz.utcfromtimestamp(stats.st_mtime) info = dict( id=checkpoint_id, last_modified=last_modified, ) return info # ContentsManager-independent checkpoint API async def rename_checkpoint(self, checkpoint_id, old_path, new_path): """Rename a checkpoint from old_path to new_path.""" old_cp_path = self.checkpoint_path(checkpoint_id, old_path) new_cp_path = self.checkpoint_path(checkpoint_id, new_path) if os.path.isfile(old_cp_path): self.log.debug( "Renaming checkpoint %s -> %s", old_cp_path, new_cp_path, ) with self.perm_to_403(): await run_sync(shutil.move, old_cp_path, new_cp_path) async def delete_checkpoint(self, checkpoint_id, path): """delete a file's checkpoint""" path = path.strip("/") cp_path = self.checkpoint_path(checkpoint_id, path) if not os.path.isfile(cp_path): self.no_such_checkpoint(path, checkpoint_id) self.log.debug("unlinking %s", cp_path) with self.perm_to_403(): await run_sync(os.unlink, cp_path) async def list_checkpoints(self, path): """list the checkpoints for a given file This contents manager currently only supports one checkpoint per file. """ path = path.strip("/") checkpoint_id = "checkpoint" os_path = self.checkpoint_path(checkpoint_id, path) if not os.path.isfile(os_path): return [] else: return [await self.checkpoint_model(checkpoint_id, os_path)] class GenericFileCheckpoints(GenericCheckpointsMixin, FileCheckpoints): """ Local filesystem Checkpoints that works with any conforming ContentsManager. 
""" def create_file_checkpoint(self, content, format, path): """Create a checkpoint from the current content of a file.""" path = path.strip("/") # only the one checkpoint ID: checkpoint_id = u"checkpoint" os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) self.log.debug("creating checkpoint for %s", path) with self.perm_to_403(): self._save_file(os_checkpoint_path, content, format=format) # return the checkpoint info return self.checkpoint_model(checkpoint_id, os_checkpoint_path) def create_notebook_checkpoint(self, nb, path): """Create a checkpoint from the current content of a notebook.""" path = path.strip("/") # only the one checkpoint ID: checkpoint_id = u"checkpoint" os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) self.log.debug("creating checkpoint for %s", path) with self.perm_to_403(): self._save_notebook(os_checkpoint_path, nb) # return the checkpoint info return self.checkpoint_model(checkpoint_id, os_checkpoint_path) def get_notebook_checkpoint(self, checkpoint_id, path): """Get a checkpoint for a notebook.""" path = path.strip("/") self.log.info("restoring %s from checkpoint %s", path, checkpoint_id) os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) if not os.path.isfile(os_checkpoint_path): self.no_such_checkpoint(path, checkpoint_id) return { "type": "notebook", "content": self._read_notebook( os_checkpoint_path, as_version=4, ), } def get_file_checkpoint(self, checkpoint_id, path): """Get a checkpoint for a file.""" path = path.strip("/") self.log.info("restoring %s from checkpoint %s", path, checkpoint_id) os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) if not os.path.isfile(os_checkpoint_path): self.no_such_checkpoint(path, checkpoint_id) content, format = self._read_file(os_checkpoint_path, format=None) return { "type": "file", "content": content, "format": format, } class AsyncGenericFileCheckpoints(AsyncGenericCheckpointsMixin, AsyncFileCheckpoints): """ Asynchronous Local filesystem Checkpoints that works with any conforming ContentsManager. 
""" async def create_file_checkpoint(self, content, format, path): """Create a checkpoint from the current content of a file.""" path = path.strip("/") # only the one checkpoint ID: checkpoint_id = u"checkpoint" os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) self.log.debug("creating checkpoint for %s", path) with self.perm_to_403(): await self._save_file(os_checkpoint_path, content, format=format) # return the checkpoint info return await self.checkpoint_model(checkpoint_id, os_checkpoint_path) async def create_notebook_checkpoint(self, nb, path): """Create a checkpoint from the current content of a notebook.""" path = path.strip("/") # only the one checkpoint ID: checkpoint_id = u"checkpoint" os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) self.log.debug("creating checkpoint for %s", path) with self.perm_to_403(): await self._save_notebook(os_checkpoint_path, nb) # return the checkpoint info return await self.checkpoint_model(checkpoint_id, os_checkpoint_path) async def get_notebook_checkpoint(self, checkpoint_id, path): """Get a checkpoint for a notebook.""" path = path.strip("/") self.log.info("restoring %s from checkpoint %s", path, checkpoint_id) os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) if not os.path.isfile(os_checkpoint_path): self.no_such_checkpoint(path, checkpoint_id) return { "type": "notebook", "content": await self._read_notebook( os_checkpoint_path, as_version=4, ), } async def get_file_checkpoint(self, checkpoint_id, path): """Get a checkpoint for a file.""" path = path.strip("/") self.log.info("restoring %s from checkpoint %s", path, checkpoint_id) os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) if not os.path.isfile(os_checkpoint_path): self.no_such_checkpoint(path, checkpoint_id) content, format = await self._read_file(os_checkpoint_path, format=None) return { "type": "file", "content": content, "format": format, } jupyter_server-1.13.1/jupyter_server/services/contents/fileio.py000066400000000000000000000354461415445537200253040ustar00rootroot00000000000000""" Utilities for file-based Contents/Checkpoints managers. """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import errno import io import os import shutil from base64 import decodebytes from base64 import encodebytes from contextlib import contextmanager from functools import partial import nbformat from anyio.to_thread import run_sync from tornado.web import HTTPError from traitlets import Bool from traitlets.config import Configurable from jupyter_server.utils import to_api_path from jupyter_server.utils import to_os_path def replace_file(src, dst): """replace dst with src""" os.replace(src, dst) async def async_replace_file(src, dst): """replace dst with src asynchronously""" await run_sync(os.replace, src, dst) def copy2_safe(src, dst, log=None): """copy src to dst like shutil.copy2, but log errors in copystat instead of raising """ shutil.copyfile(src, dst) try: shutil.copystat(src, dst) except OSError: if log: log.debug("copystat on %s failed", dst, exc_info=True) async def async_copy2_safe(src, dst, log=None): """copy src to dst asynchronously like shutil.copy2, but log errors in copystat instead of raising """ await run_sync(shutil.copyfile, src, dst) try: await run_sync(shutil.copystat, src, dst) except OSError: if log: log.debug("copystat on %s failed", dst, exc_info=True) def path_to_intermediate(path): """Name of the intermediate file used in atomic writes. 
The .~ prefix will make Dropbox ignore the temporary file.""" dirname, basename = os.path.split(path) return os.path.join(dirname, ".~" + basename) def path_to_invalid(path): """Name of invalid file after a failed atomic write and subsequent read.""" dirname, basename = os.path.split(path) return os.path.join(dirname, basename + ".invalid") @contextmanager def atomic_writing(path, text=True, encoding="utf-8", log=None, **kwargs): """Context manager to write to a file only if the entire write is successful. This works by copying the previous file contents to a temporary file in the same directory, and renaming that file back to the target if the context exits with an error. If the context is successful, the new data is synced to disk and the temporary file is removed. Parameters ---------- path : str The target file to write to. text : bool, optional Whether to open the file in text mode (i.e. to write unicode). Default is True. encoding : str, optional The encoding to use for files opened in text mode. Default is UTF-8. **kwargs Passed to :func:`io.open`. """ # realpath doesn't work on Windows: https://bugs.python.org/issue9949 # Luckily, we only need to resolve the file itself being a symlink, not # any of its directories, so this will suffice: if os.path.islink(path): path = os.path.join(os.path.dirname(path), os.readlink(path)) tmp_path = path_to_intermediate(path) if os.path.isfile(path): copy2_safe(path, tmp_path, log=log) if text: # Make sure that text files have Unix linefeeds by default kwargs.setdefault("newline", "\n") fileobj = io.open(path, "w", encoding=encoding, **kwargs) else: fileobj = io.open(path, "wb", **kwargs) try: yield fileobj except: # Failed! Move the backup file back to the real path to avoid corruption fileobj.close() replace_file(tmp_path, path) raise # Flush to disk fileobj.flush() os.fsync(fileobj.fileno()) fileobj.close() # Written successfully, now remove the backup copy if os.path.isfile(tmp_path): os.remove(tmp_path) @contextmanager def _simple_writing(path, text=True, encoding="utf-8", log=None, **kwargs): """Context manager to write a file without atomic writing (for unusual filesystems, e.g. NFS). Parameters ---------- path : str The target file to write to. text : bool, optional Whether to open the file in text mode (i.e. to write unicode). Default is True. encoding : str, optional The encoding to use for files opened in text mode. Default is UTF-8. **kwargs Passed to :func:`io.open`. """ # realpath doesn't work on Windows: https://bugs.python.org/issue9949 # Luckily, we only need to resolve the file itself being a symlink, not # any of its directories, so this will suffice: if os.path.islink(path): path = os.path.join(os.path.dirname(path), os.readlink(path)) if text: # Make sure that text files have Unix linefeeds by default kwargs.setdefault("newline", "\n") fileobj = io.open(path, "w", encoding=encoding, **kwargs) else: fileobj = io.open(path, "wb", **kwargs) try: yield fileobj except: fileobj.close() raise fileobj.close() class FileManagerMixin(Configurable): """ Mixin for ContentsAPI classes that interact with the filesystem. Provides facilities for reading, writing, and copying files. Shared by FileContentsManager and FileCheckpoints. Note ---- Classes using this mixin must provide the following attributes: root_dir : unicode A directory against which API-style paths are to be resolved.
log : logging.Logger """ use_atomic_writing = Bool( True, config=True, help="""By default notebooks are first saved to a temporary file on disk which, if successfully written, then replaces the old one. This procedure, namely 'atomic_writing', can misbehave on filesystems without operation order enforcement (like some networked filesystems). If set to False, the new notebook is written directly over the old one, which could fail (e.g. on a full filesystem or exceeded quota)""", ) @contextmanager def open(self, os_path, *args, **kwargs): """wrapper around io.open that turns permission errors into 403""" with self.perm_to_403(os_path): with io.open(os_path, *args, **kwargs) as f: yield f @contextmanager def atomic_writing(self, os_path, *args, **kwargs): """wrapper around atomic_writing that turns permission errors into 403. Depending on the 'use_atomic_writing' flag, the wrapper performs an actual atomic write or a simple write (whether or not an old file exists)""" with self.perm_to_403(os_path): if self.use_atomic_writing: with atomic_writing(os_path, *args, log=self.log, **kwargs) as f: yield f else: with _simple_writing(os_path, *args, log=self.log, **kwargs) as f: yield f @contextmanager def perm_to_403(self, os_path=""): """context manager for turning permission errors into 403.""" try: yield except (OSError, IOError) as e: if e.errno in {errno.EPERM, errno.EACCES}: # make 403 error message without root prefix # this may not work perfectly on unicode paths on Python 2, # but nobody should be doing that anyway. if not os_path: os_path = e.filename or "unknown file" path = to_api_path(os_path, root=self.root_dir) raise HTTPError(403, u"Permission denied: %s" % path) from e else: raise def _copy(self, src, dest): """copy src to dest like shutil.copy2, but log errors in copystat """ copy2_safe(src, dest, log=self.log) def _get_os_path(self, path): """Given an API path, return its file system path. Parameters ---------- path : string The relative API path to the named file. Returns ------- path : string Native, absolute OS path for a file. Raises ------ 404: if path is outside root """ root = os.path.abspath(self.root_dir) os_path = to_os_path(path, root) if not (os.path.abspath(os_path) + os.path.sep).startswith(root): raise HTTPError(404, "%s is outside root contents directory" % path) return os_path def _read_notebook(self, os_path, as_version=4): """Read a notebook from an os path.""" with self.open(os_path, "r", encoding="utf-8") as f: try: return nbformat.read(f, as_version=as_version) except Exception as e: e_orig = e # If use_atomic_writing is enabled, we'll guess that it was also # enabled when this notebook was written and look for a valid # atomic intermediate. tmp_path = path_to_intermediate(os_path) if not self.use_atomic_writing or not os.path.exists(tmp_path): raise HTTPError( 400, u"Unreadable Notebook: %s %r" % (os_path, e_orig), ) # Move the bad file aside, restore the intermediate, and try again. invalid_file = path_to_invalid(os_path) replace_file(os_path, invalid_file) replace_file(tmp_path, os_path) return self._read_notebook(os_path, as_version) def _save_notebook(self, os_path, nb): """Save a notebook to an os_path.""" with self.atomic_writing(os_path, encoding="utf-8") as f: nbformat.write(nb, f, version=nbformat.NO_CONVERT) def _read_file(self, os_path, format): """Read a non-notebook file. os_path: The path to be read. format: If 'text', the contents will be decoded as UTF-8. If 'base64', the raw bytes contents will be encoded as base64.
If not specified, try to decode as UTF-8, and fall back to base64 """ if not os.path.isfile(os_path): raise HTTPError(400, "Cannot read non-file %s" % os_path) with self.open(os_path, "rb") as f: bcontent = f.read() if format is None or format == "text": # Try to interpret as unicode if format is unknown or if unicode # was explicitly requested. try: return bcontent.decode("utf8"), "text" except UnicodeError as e: if format == "text": raise HTTPError( 400, "%s is not UTF-8 encoded" % os_path, reason="bad format", ) from e return encodebytes(bcontent).decode("ascii"), "base64" def _save_file(self, os_path, content, format): """Save content of a generic file.""" if format not in {"text", "base64"}: raise HTTPError( 400, "Must specify format of file contents as 'text' or 'base64'", ) try: if format == "text": bcontent = content.encode("utf8") else: b64_bytes = content.encode("ascii") bcontent = decodebytes(b64_bytes) except Exception as e: raise HTTPError(400, u"Encoding error saving %s: %s" % (os_path, e)) from e with self.atomic_writing(os_path, text=False) as f: f.write(bcontent) class AsyncFileManagerMixin(FileManagerMixin): """ Mixin for ContentsAPI classes that interact with the filesystem asynchronously. """ async def _copy(self, src, dest): """copy src to dest like shutil.copy2, but log errors in copystat """ await async_copy2_safe(src, dest, log=self.log) async def _read_notebook(self, os_path, as_version=4): """Read a notebook from an os path.""" with self.open(os_path, "r", encoding="utf-8") as f: try: return await run_sync(partial(nbformat.read, as_version=as_version), f) except Exception as e: e_orig = e # If use_atomic_writing is enabled, we'll guess that it was also # enabled when this notebook was written and look for a valid # atomic intermediate. tmp_path = path_to_intermediate(os_path) if not self.use_atomic_writing or not os.path.exists(tmp_path): raise HTTPError( 400, u"Unreadable Notebook: %s %r" % (os_path, e_orig), ) # Move the bad file aside, restore the intermediate, and try again. invalid_file = path_to_invalid(os_path) await async_replace_file(os_path, invalid_file) await async_replace_file(tmp_path, os_path) return await self._read_notebook(os_path, as_version) async def _save_notebook(self, os_path, nb): """Save a notebook to an os_path.""" with self.atomic_writing(os_path, encoding="utf-8") as f: await run_sync(partial(nbformat.write, version=nbformat.NO_CONVERT), nb, f) async def _read_file(self, os_path, format): """Read a non-notebook file. os_path: The path to be read. format: If 'text', the contents will be decoded as UTF-8. If 'base64', the raw bytes contents will be encoded as base64. If not specified, try to decode as UTF-8, and fall back to base64 """ if not os.path.isfile(os_path): raise HTTPError(400, "Cannot read non-file %s" % os_path) with self.open(os_path, "rb") as f: bcontent = await run_sync(f.read) if format is None or format == "text": # Try to interpret as unicode if format is unknown or if unicode # was explicitly requested. 
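# A UnicodeError below means the bytes are not valid UTF-8: that is a hard # 400 when the caller explicitly asked for "text", and a silent fallback to # a base64 payload when no format was specified.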
try: return bcontent.decode("utf8"), "text" except UnicodeError as e: if format == "text": raise HTTPError( 400, "%s is not UTF-8 encoded" % os_path, reason="bad format", ) from e return encodebytes(bcontent).decode("ascii"), "base64" async def _save_file(self, os_path, content, format): """Save content of a generic file.""" if format not in {"text", "base64"}: raise HTTPError( 400, "Must specify format of file contents as 'text' or 'base64'", ) try: if format == "text": bcontent = content.encode("utf8") else: b64_bytes = content.encode("ascii") bcontent = decodebytes(b64_bytes) except Exception as e: raise HTTPError(400, u"Encoding error saving %s: %s" % (os_path, e)) from e with self.atomic_writing(os_path, text=False) as f: await run_sync(f.write, bcontent) jupyter_server-1.13.1/jupyter_server/services/contents/filemanager.py000066400000000000000000001014321415445537200262740ustar00rootroot00000000000000"""A contents manager that uses the local file system for storage.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import errno import mimetypes import os import shutil import stat import sys from datetime import datetime import nbformat from anyio.to_thread import run_sync from ipython_genutils.importstring import import_item from jupyter_core.paths import exists from jupyter_core.paths import is_file_hidden from jupyter_core.paths import is_hidden from send2trash import send2trash from tornado import web from traitlets import Any from traitlets import Bool from traitlets import default from traitlets import TraitError from traitlets import Unicode from traitlets import validate from .filecheckpoints import AsyncFileCheckpoints from .filecheckpoints import FileCheckpoints from .fileio import AsyncFileManagerMixin from .fileio import FileManagerMixin from .manager import AsyncContentsManager from .manager import ContentsManager from jupyter_server import _tz as tz from jupyter_server.base.handlers import AuthenticatedFileHandler from jupyter_server.transutils import _i18n try: from os.path import samefile except ImportError: # windows + py2 from jupyter_server.utils import samefile_simple as samefile _script_exporter = None class FileContentsManager(FileManagerMixin, ContentsManager): root_dir = Unicode(config=True) @default("root_dir") def _default_root_dir(self): try: return self.parent.root_dir except AttributeError: return os.getcwd() post_save_hook = Any( None, config=True, allow_none=True, help="""Python callable or importstring thereof to be called on the path of a file just saved. This can be used to process the file on disk, such as converting the notebook to a script or HTML via nbconvert. 
It will be called as (all arguments passed by keyword):: hook(os_path=os_path, model=model, contents_manager=instance) - os_path: the filesystem path to the file just written - model: the model representing the file - contents_manager: this ContentsManager instance """, ) @validate("post_save_hook") def _validate_post_save_hook(self, proposal): value = proposal["value"] if isinstance(value, str): value = import_item(value) if not callable(value): raise TraitError("post_save_hook must be callable") return value def run_post_save_hook(self, model, os_path): """Run the post-save hook if defined, and log errors""" if self.post_save_hook: try: self.log.debug("Running post-save hook on %s", os_path) self.post_save_hook(os_path=os_path, model=model, contents_manager=self) except Exception as e: self.log.error("Post-save hook failed on %s", os_path, exc_info=True) raise web.HTTPError( 500, u"Unexpected error while running post-save hook: %s" % e ) from e @validate("root_dir") def _validate_root_dir(self, proposal): """Do a bit of validation of the root_dir.""" value = proposal["value"] if not os.path.isabs(value): # If we receive a non-absolute path, make it absolute. value = os.path.abspath(value) if not os.path.isdir(value): raise TraitError("%r is not a directory" % value) return value @default("checkpoints_class") def _checkpoints_class_default(self): return FileCheckpoints delete_to_trash = Bool( True, config=True, help="""If True (default), deleting files will send them to the platform's trash/recycle bin, where they can be recovered. If False, deleting files really deletes them.""", ) always_delete_dir = Bool( False, config=True, help="""If True, deleting a non-empty directory will always be allowed. WARNING this may result in files being permanently removed; e.g. on Windows, if the data size is too big for the trash/recycle bin the directory will be permanently deleted. If False (default), a non-empty directory will be sent to the trash only if that is safe; and if ``delete_to_trash`` is False, it won't be deleted at all.""", ) @default("files_handler_class") def _files_handler_class_default(self): return AuthenticatedFileHandler @default("files_handler_params") def _files_handler_params_default(self): return {"path": self.root_dir} def is_hidden(self, path): """Does the API style path correspond to a hidden directory or file? Parameters ---------- path : string The path to check. This is an API path (`/` separated, relative to root_dir). Returns ------- hidden : bool Whether the path exists and is hidden. """ path = path.strip("/") os_path = self._get_os_path(path=path) return is_hidden(os_path, self.root_dir) def file_exists(self, path): """Returns True if the file exists, else returns False. API-style wrapper for os.path.isfile Parameters ---------- path : string The relative path to the file (with '/' as separator) Returns ------- exists : bool Whether the file exists. """ path = path.strip("/") os_path = self._get_os_path(path) return os.path.isfile(os_path) def dir_exists(self, path): """Does the API-style path refer to an extant directory? API-style wrapper for os.path.isdir Parameters ---------- path : string The path to check. This is an API path (`/` separated, relative to root_dir). Returns ------- exists : bool Whether the path is indeed a directory. """ path = path.strip("/") os_path = self._get_os_path(path=path) return os.path.isdir(os_path) def exists(self, path): """Returns True if the path exists, else returns False.
API-style wrapper for os.path.exists Parameters ---------- path : string The API path to the file (with '/' as separator) Returns ------- exists : bool Whether the target exists. """ path = path.strip("/") os_path = self._get_os_path(path=path) return exists(os_path) def _base_model(self, path): """Build the common base of a contents model""" os_path = self._get_os_path(path) info = os.lstat(os_path) try: # size of file size = info.st_size except (ValueError, OSError): self.log.warning("Unable to get size.") size = None try: last_modified = tz.utcfromtimestamp(info.st_mtime) except (ValueError, OSError): # Files can rarely have an invalid timestamp # https://github.com/jupyter/notebook/issues/2539 # https://github.com/jupyter/notebook/issues/2757 # Use the Unix epoch as a fallback so we don't crash. self.log.warning("Invalid mtime %s for %s", info.st_mtime, os_path) last_modified = datetime(1970, 1, 1, 0, 0, tzinfo=tz.UTC) try: created = tz.utcfromtimestamp(info.st_ctime) except (ValueError, OSError): # See above self.log.warning("Invalid ctime %s for %s", info.st_ctime, os_path) created = datetime(1970, 1, 1, 0, 0, tzinfo=tz.UTC) # Create the base model. model = {} model["name"] = path.rsplit("/", 1)[-1] model["path"] = path model["last_modified"] = last_modified model["created"] = created model["content"] = None model["format"] = None model["mimetype"] = None model["size"] = size try: model["writable"] = os.access(os_path, os.W_OK) except OSError: self.log.error("Failed to check write permissions on %s", os_path) model["writable"] = False return model def _dir_model(self, path, content=True): """Build a model for a directory if content is requested, will include a listing of the directory """ os_path = self._get_os_path(path) four_o_four = u"directory does not exist: %r" % path if not os.path.isdir(os_path): raise web.HTTPError(404, four_o_four) elif is_hidden(os_path, self.root_dir) and not self.allow_hidden: self.log.info("Refusing to serve hidden directory %r, via 404 Error", os_path) raise web.HTTPError(404, four_o_four) model = self._base_model(path) model["type"] = "directory" model["size"] = None if content: model["content"] = contents = [] os_dir = self._get_os_path(path) for name in os.listdir(os_dir): try: os_path = os.path.join(os_dir, name) except UnicodeDecodeError as e: self.log.warning("failed to decode filename '%s': %s", name, e) continue try: st = os.lstat(os_path) except OSError as e: # skip over broken symlinks in listing if e.errno == errno.ENOENT: self.log.warning("%s doesn't exist", os_path) elif e.errno != errno.EACCES: # Don't provide clues about protected files self.log.warning("Error stat-ing %s: %s", os_path, e) continue if ( not stat.S_ISLNK(st.st_mode) and not stat.S_ISREG(st.st_mode) and not stat.S_ISDIR(st.st_mode) ): self.log.debug("%s not a regular file", os_path) continue try: if self.should_list(name): if self.allow_hidden or not is_file_hidden(os_path, stat_res=st): contents.append(self.get(path="%s/%s" % (path, name), content=False)) except OSError as e: # ELOOP: recursive symlink, also don't show failure due to permissions if e.errno not in [errno.ELOOP, errno.EACCES]: self.log.warning( "Unknown error checking if file %r is hidden", os_path, exc_info=True, ) model["format"] = "json" return model def _file_model(self, path, content=True, format=None): """Build a model for a file if content is requested, include the file contents. format: If 'text', the contents will be decoded as UTF-8. If 'base64', the raw bytes contents will be encoded as base64. 
If not specified, try to decode as UTF-8, and fall back to base64 """ model = self._base_model(path) model["type"] = "file" os_path = self._get_os_path(path) model["mimetype"] = mimetypes.guess_type(os_path)[0] if content: content, format = self._read_file(os_path, format) if model["mimetype"] is None: default_mime = { "text": "text/plain", "base64": "application/octet-stream", }[format] model["mimetype"] = default_mime model.update( content=content, format=format, ) return model def _notebook_model(self, path, content=True): """Build a notebook model if content is requested, the notebook content will be populated as a JSON structure (not double-serialized) """ model = self._base_model(path) model["type"] = "notebook" os_path = self._get_os_path(path) if content: nb = self._read_notebook(os_path, as_version=4) self.mark_trusted_cells(nb, path) model["content"] = nb model["format"] = "json" self.validate_notebook_model(model) return model def get(self, path, content=True, type=None, format=None): """Takes a path for an entity and returns its model Parameters ---------- path : str the API path that describes the relative path for the target content : bool Whether to include the contents in the reply type : str, optional The requested type - 'file', 'notebook', or 'directory'. Will raise HTTPError 400 if the content doesn't match. format : str, optional The requested format for file contents. 'text' or 'base64'. Ignored if this returns a notebook or directory model. Returns ------- model : dict the contents model. If content=True, returns the contents of the file or directory as well. """ path = path.strip("/") if not self.exists(path): raise web.HTTPError(404, u"No such file or directory: %s" % path) os_path = self._get_os_path(path) if os.path.isdir(os_path): if type not in (None, "directory"): raise web.HTTPError( 400, u"%s is a directory, not a %s" % (path, type), reason="bad type", ) model = self._dir_model(path, content=content) elif type == "notebook" or (type is None and path.endswith(".ipynb")): model = self._notebook_model(path, content=content) else: if type == "directory": raise web.HTTPError(400, u"%s is not a directory" % path, reason="bad type") model = self._file_model(path, content=content, format=format) return model def _save_directory(self, os_path, model, path=""): """create a directory""" if is_hidden(os_path, self.root_dir) and not self.allow_hidden: raise web.HTTPError(400, u"Cannot create hidden directory %r" % os_path) if not os.path.exists(os_path): with self.perm_to_403(): os.mkdir(os_path) elif not os.path.isdir(os_path): raise web.HTTPError(400, u"Not a directory: %s" % (os_path)) else: self.log.debug("Directory %r already exists", os_path) def save(self, model, path=""): """Save the file model and return the model with no content.""" path = path.strip("/") if "type" not in model: raise web.HTTPError(400, u"No file type provided") if "content" not in model and model["type"] != "directory": raise web.HTTPError(400, u"No file content provided") os_path = self._get_os_path(path) self.log.debug("Saving %s", os_path) self.run_pre_save_hook(model=model, path=path) try: if model["type"] == "notebook": nb = nbformat.from_dict(model["content"]) self.check_and_sign(nb, path) self._save_notebook(os_path, nb) # One checkpoint should always exist for notebooks. if not self.checkpoints.list_checkpoints(path): self.create_checkpoint(path) elif model["type"] == "file": # Missing format will be handled internally by _save_file. 
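# (_save_file rejects anything other than "text" or "base64" with an HTTP # 400, so a missing or bogus format fails before any bytes reach the disk.)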
self._save_file(os_path, model["content"], model.get("format")) elif model["type"] == "directory": self._save_directory(os_path, model, path) else: raise web.HTTPError(400, "Unhandled contents type: %s" % model["type"]) except web.HTTPError: raise except Exception as e: self.log.error(u"Error while saving file: %s %s", path, e, exc_info=True) raise web.HTTPError( 500, u"Unexpected error while saving file: %s %s" % (path, e) ) from e validation_message = None if model["type"] == "notebook": self.validate_notebook_model(model) validation_message = model.get("message", None) model = self.get(path, content=False) if validation_message: model["message"] = validation_message self.run_post_save_hook(model=model, os_path=os_path) return model def delete_file(self, path): """Delete file at path.""" path = path.strip("/") os_path = self._get_os_path(path) rm = os.unlink if not os.path.exists(os_path): raise web.HTTPError(404, u"File or directory does not exist: %s" % os_path) def _check_trash(os_path): if sys.platform in {"win32", "darwin"}: return True # It's a bit more nuanced than this, but until we can better # distinguish errors from send2trash, assume that we can only trash # files on the same partition as the home directory. file_dev = os.stat(os_path).st_dev home_dev = os.stat(os.path.expanduser("~")).st_dev return file_dev == home_dev def is_non_empty_dir(os_path): if os.path.isdir(os_path): # A directory containing only leftover checkpoints is # considered empty. cp_dir = getattr(self.checkpoints, "checkpoint_dir", None) if set(os.listdir(os_path)) - {cp_dir}: return True return False if self.delete_to_trash: if not self.always_delete_dir and sys.platform == "win32" and is_non_empty_dir(os_path): # send2trash can really delete files on Windows, so disallow # deleting non-empty files. See Github issue 3631. raise web.HTTPError(400, u"Directory %s not empty" % os_path) if _check_trash(os_path): self.log.debug("Sending %s to trash", os_path) # Looking at the code in send2trash, I don't think the errors it # raises let us distinguish permission errors from other errors in # code. So for now, just let them all get logged as server errors. send2trash(os_path) return else: self.log.warning( "Skipping trash for %s, on different device " "to home directory", os_path, ) if os.path.isdir(os_path): # Don't permanently delete non-empty directories. if not self.always_delete_dir and is_non_empty_dir(os_path): raise web.HTTPError(400, u"Directory %s not empty" % os_path) self.log.debug("Removing directory %s", os_path) with self.perm_to_403(): shutil.rmtree(os_path) else: self.log.debug("Unlinking file %s", os_path) with self.perm_to_403(): rm(os_path) def rename_file(self, old_path, new_path): """Rename a file.""" old_path = old_path.strip("/") new_path = new_path.strip("/") if new_path == old_path: return new_os_path = self._get_os_path(new_path) old_os_path = self._get_os_path(old_path) # Should we proceed with the move? 
if os.path.exists(new_os_path) and not samefile(old_os_path, new_os_path): raise web.HTTPError(409, u"File already exists: %s" % new_path) # Move the file try: with self.perm_to_403(): shutil.move(old_os_path, new_os_path) except web.HTTPError: raise except Exception as e: raise web.HTTPError(500, u"Unknown error renaming file: %s %s" % (old_path, e)) from e def info_string(self): return _i18n("Serving notebooks from local directory: %s") % self.root_dir def get_kernel_path(self, path, model=None): """Return the initial API path of a kernel associated with a given notebook""" if self.dir_exists(path): return path if "/" in path: parent_dir = path.rsplit("/", 1)[0] else: parent_dir = "" return parent_dir class AsyncFileContentsManager(FileContentsManager, AsyncFileManagerMixin, AsyncContentsManager): @default("checkpoints_class") def _checkpoints_class_default(self): return AsyncFileCheckpoints async def _dir_model(self, path, content=True): """Build a model for a directory if content is requested, will include a listing of the directory """ os_path = self._get_os_path(path) four_o_four = u"directory does not exist: %r" % path if not os.path.isdir(os_path): raise web.HTTPError(404, four_o_four) elif is_hidden(os_path, self.root_dir) and not self.allow_hidden: self.log.info("Refusing to serve hidden directory %r, via 404 Error", os_path) raise web.HTTPError(404, four_o_four) model = self._base_model(path) model["type"] = "directory" model["size"] = None if content: model["content"] = contents = [] os_dir = self._get_os_path(path) dir_contents = await run_sync(os.listdir, os_dir) for name in dir_contents: try: os_path = os.path.join(os_dir, name) except UnicodeDecodeError as e: self.log.warning("failed to decode filename '%s': %s", name, e) continue try: st = await run_sync(os.lstat, os_path) except OSError as e: # skip over broken symlinks in listing if e.errno == errno.ENOENT: self.log.warning("%s doesn't exist", os_path) elif e.errno != errno.EACCES: # Don't provide clues about protected files self.log.warning("Error stat-ing %s: %s", os_path, e) continue if ( not stat.S_ISLNK(st.st_mode) and not stat.S_ISREG(st.st_mode) and not stat.S_ISDIR(st.st_mode) ): self.log.debug("%s not a regular file", os_path) continue try: if self.should_list(name): if self.allow_hidden or not is_file_hidden(os_path, stat_res=st): contents.append( await self.get(path="%s/%s" % (path, name), content=False) ) except OSError as e: # ELOOP: recursive symlink, also don't show failure due to permissions if e.errno not in [errno.ELOOP, errno.EACCES]: self.log.warning( "Unknown error checking if file %r is hidden", os_path, exc_info=True, ) model["format"] = "json" return model async def _file_model(self, path, content=True, format=None): """Build a model for a file if content is requested, include the file contents. format: If 'text', the contents will be decoded as UTF-8. If 'base64', the raw bytes contents will be encoded as base64. 
If not specified, try to decode as UTF-8, and fall back to base64 """ model = self._base_model(path) model["type"] = "file" os_path = self._get_os_path(path) model["mimetype"] = mimetypes.guess_type(os_path)[0] if content: content, format = await self._read_file(os_path, format) if model["mimetype"] is None: default_mime = { "text": "text/plain", "base64": "application/octet-stream", }[format] model["mimetype"] = default_mime model.update( content=content, format=format, ) return model async def _notebook_model(self, path, content=True): """Build a notebook model if content is requested, the notebook content will be populated as a JSON structure (not double-serialized) """ model = self._base_model(path) model["type"] = "notebook" os_path = self._get_os_path(path) if content: nb = await self._read_notebook(os_path, as_version=4) self.mark_trusted_cells(nb, path) model["content"] = nb model["format"] = "json" self.validate_notebook_model(model) return model async def get(self, path, content=True, type=None, format=None): """Takes a path for an entity and returns its model Parameters ---------- path : str the API path that describes the relative path for the target content : bool Whether to include the contents in the reply type : str, optional The requested type - 'file', 'notebook', or 'directory'. Will raise HTTPError 400 if the content doesn't match. format : str, optional The requested format for file contents. 'text' or 'base64'. Ignored if this returns a notebook or directory model. Returns ------- model : dict the contents model. If content=True, returns the contents of the file or directory as well. """ path = path.strip("/") if not self.exists(path): raise web.HTTPError(404, u"No such file or directory: %s" % path) os_path = self._get_os_path(path) if os.path.isdir(os_path): if type not in (None, "directory"): raise web.HTTPError( 400, u"%s is a directory, not a %s" % (path, type), reason="bad type", ) model = await self._dir_model(path, content=content) elif type == "notebook" or (type is None and path.endswith(".ipynb")): model = await self._notebook_model(path, content=content) else: if type == "directory": raise web.HTTPError(400, u"%s is not a directory" % path, reason="bad type") model = await self._file_model(path, content=content, format=format) return model async def _save_directory(self, os_path, model, path=""): """create a directory""" if is_hidden(os_path, self.root_dir) and not self.allow_hidden: raise web.HTTPError(400, u"Cannot create hidden directory %r" % os_path) if not os.path.exists(os_path): with self.perm_to_403(): await run_sync(os.mkdir, os_path) elif not os.path.isdir(os_path): raise web.HTTPError(400, u"Not a directory: %s" % (os_path)) else: self.log.debug("Directory %r already exists", os_path) async def save(self, model, path=""): """Save the file model and return the model with no content.""" path = path.strip("/") if "type" not in model: raise web.HTTPError(400, u"No file type provided") if "content" not in model and model["type"] != "directory": raise web.HTTPError(400, u"No file content provided") os_path = self._get_os_path(path) self.log.debug("Saving %s", os_path) self.run_pre_save_hook(model=model, path=path) try: if model["type"] == "notebook": nb = nbformat.from_dict(model["content"]) self.check_and_sign(nb, path) await self._save_notebook(os_path, nb) # One checkpoint should always exist for notebooks. 
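# (The first successful save therefore seeds the checkpoint list, so a later # restore request always has at least one checkpoint to target.)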
if not (await self.checkpoints.list_checkpoints(path)): await self.create_checkpoint(path) elif model["type"] == "file": # Missing format will be handled internally by _save_file. await self._save_file(os_path, model["content"], model.get("format")) elif model["type"] == "directory": await self._save_directory(os_path, model, path) else: raise web.HTTPError(400, "Unhandled contents type: %s" % model["type"]) except web.HTTPError: raise except Exception as e: self.log.error(u"Error while saving file: %s %s", path, e, exc_info=True) raise web.HTTPError( 500, u"Unexpected error while saving file: %s %s" % (path, e) ) from e validation_message = None if model["type"] == "notebook": self.validate_notebook_model(model) validation_message = model.get("message", None) model = await self.get(path, content=False) if validation_message: model["message"] = validation_message self.run_post_save_hook(model=model, os_path=os_path) return model async def delete_file(self, path): """Delete file at path.""" path = path.strip("/") os_path = self._get_os_path(path) rm = os.unlink if not os.path.exists(os_path): raise web.HTTPError(404, u"File or directory does not exist: %s" % os_path) async def _check_trash(os_path): if sys.platform in {"win32", "darwin"}: return True # It's a bit more nuanced than this, but until we can better # distinguish errors from send2trash, assume that we can only trash # files on the same partition as the home directory. file_dev = (await run_sync(os.stat, os_path)).st_dev home_dev = (await run_sync(os.stat, os.path.expanduser("~"))).st_dev return file_dev == home_dev async def is_non_empty_dir(os_path): if os.path.isdir(os_path): # A directory containing only leftover checkpoints is # considered empty. cp_dir = getattr(self.checkpoints, "checkpoint_dir", None) dir_contents = set(await run_sync(os.listdir, os_path)) if dir_contents - {cp_dir}: return True return False if self.delete_to_trash: if ( not self.always_delete_dir and sys.platform == "win32" and await is_non_empty_dir(os_path) ): # send2trash can really delete files on Windows, so disallow # deleting non-empty directories. See GitHub issue 3631. raise web.HTTPError(400, u"Directory %s not empty" % os_path) if await _check_trash(os_path): self.log.debug("Sending %s to trash", os_path) # Looking at the code in send2trash, I don't think the errors it # raises let us distinguish permission errors from other errors in # code. So for now, just let them all get logged as server errors. send2trash(os_path) return else: self.log.warning( "Skipping trash for %s, on different device " "to home directory", os_path, ) if os.path.isdir(os_path): # Don't permanently delete non-empty directories. if not self.always_delete_dir and await is_non_empty_dir(os_path): raise web.HTTPError(400, u"Directory %s not empty" % os_path) self.log.debug("Removing directory %s", os_path) with self.perm_to_403(): await run_sync(shutil.rmtree, os_path) else: self.log.debug("Unlinking file %s", os_path) with self.perm_to_403(): await run_sync(rm, os_path) async def rename_file(self, old_path, new_path): """Rename a file.""" old_path = old_path.strip("/") new_path = new_path.strip("/") if new_path == old_path: return new_os_path = self._get_os_path(new_path) old_os_path = self._get_os_path(old_path) # Should we proceed with the move?
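# As in the synchronous manager: renaming onto an existing file is a 409 # conflict unless both paths resolve to the same file.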
if os.path.exists(new_os_path) and not samefile(old_os_path, new_os_path): raise web.HTTPError(409, u"File already exists: %s" % new_path) # Move the file try: with self.perm_to_403(): await run_sync(shutil.move, old_os_path, new_os_path) except web.HTTPError: raise except Exception as e: raise web.HTTPError(500, u"Unknown error renaming file: %s %s" % (old_path, e)) from e jupyter_server-1.13.1/jupyter_server/services/contents/handlers.py000066400000000000000000000257341415445537200256340ustar00rootroot00000000000000"""Tornado handlers for the contents web service. Preliminary documentation at https://github.com/ipython/ipython/wiki/IPEP-27%3A-Contents-Service """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import json try: from jupyter_client.jsonutil import json_default except ImportError: from jupyter_client.jsonutil import date_default as json_default from tornado import web from jupyter_server.base.handlers import APIHandler from jupyter_server.base.handlers import JupyterHandler from jupyter_server.base.handlers import path_regex from jupyter_server.utils import ensure_async from jupyter_server.utils import url_escape from jupyter_server.utils import url_path_join def validate_model(model, expect_content): """ Validate a model returned by a ContentsManager method. If expect_content is True, then we expect non-null entries for 'content' and 'format'. """ required_keys = { "name", "path", "type", "writable", "created", "last_modified", "mimetype", "content", "format", } missing = required_keys - set(model.keys()) if missing: raise web.HTTPError( 500, u"Missing Model Keys: {missing}".format(missing=missing), ) maybe_none_keys = ["content", "format"] if expect_content: errors = [key for key in maybe_none_keys if model[key] is None] if errors: raise web.HTTPError( 500, u"Keys unexpectedly None: {keys}".format(keys=errors), ) else: errors = {key: model[key] for key in maybe_none_keys if model[key] is not None} if errors: raise web.HTTPError( 500, u"Keys unexpectedly not None: {keys}".format(keys=errors), ) class ContentsHandler(APIHandler): def location_url(self, path): """Return the full URL location of a file. Parameters ---------- path : unicode The API path of the file, such as "foo/bar.txt". """ return url_path_join(self.base_url, "api", "contents", url_escape(path)) def _finish_model(self, model, location=True): """Finish a JSON request with a model, setting relevant headers, etc.""" if location: location = self.location_url(model["path"]) self.set_header("Location", location) self.set_header("Last-Modified", model["last_modified"]) self.set_header("Content-Type", "application/json") self.finish(json.dumps(model, default=json_default)) @web.authenticated async def get(self, path=""): """Return a model for a file or directory. A directory model contains a list of models (without content) of the files and directories it contains. 
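A request sketch (the path and arguments are illustrative; they are validated against the allowed values below)::

    GET /api/contents/notebooks/analysis.ipynb?type=notebook&content=1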
""" path = path or "" type = self.get_query_argument("type", default=None) if type not in {None, "directory", "file", "notebook"}: raise web.HTTPError(400, u"Type %r is invalid" % type) format = self.get_query_argument("format", default=None) if format not in {None, "text", "base64"}: raise web.HTTPError(400, u"Format %r is invalid" % format) content = self.get_query_argument("content", default="1") if content not in {"0", "1"}: raise web.HTTPError(400, u"Content %r is invalid" % content) content = int(content) model = await ensure_async( self.contents_manager.get( path=path, type=type, format=format, content=content, ) ) validate_model(model, expect_content=content) self._finish_model(model, location=False) @web.authenticated async def patch(self, path=""): """PATCH renames a file or directory without re-uploading content.""" cm = self.contents_manager model = self.get_json_body() if model is None: raise web.HTTPError(400, u"JSON body missing") model = await ensure_async(cm.update(model, path)) validate_model(model, expect_content=False) self._finish_model(model) async def _copy(self, copy_from, copy_to=None): """Copy a file, optionally specifying a target directory.""" self.log.info( u"Copying {copy_from} to {copy_to}".format( copy_from=copy_from, copy_to=copy_to or "", ) ) model = await ensure_async(self.contents_manager.copy(copy_from, copy_to)) self.set_status(201) validate_model(model, expect_content=False) self._finish_model(model) async def _upload(self, model, path): """Handle upload of a new file to path""" self.log.info(u"Uploading file to %s", path) model = await ensure_async(self.contents_manager.new(model, path)) self.set_status(201) validate_model(model, expect_content=False) self._finish_model(model) async def _new_untitled(self, path, type="", ext=""): """Create a new, empty untitled entity""" self.log.info(u"Creating new %s in %s", type or "file", path) model = await ensure_async( self.contents_manager.new_untitled(path=path, type=type, ext=ext) ) self.set_status(201) validate_model(model, expect_content=False) self._finish_model(model) async def _save(self, model, path): """Save an existing file.""" chunk = model.get("chunk", None) if not chunk or chunk == -1: # Avoid tedious log information self.log.info(u"Saving file at %s", path) model = await ensure_async(self.contents_manager.save(model, path)) validate_model(model, expect_content=False) self._finish_model(model) @web.authenticated async def post(self, path=""): """Create a new file in the specified path. POST creates new files. The server always decides on the name. POST /api/contents/path New untitled, empty file or directory. POST /api/contents/path with body {"copy_from" : "/path/to/OtherNotebook.ipynb"} New copy of OtherNotebook in path """ cm = self.contents_manager file_exists = await ensure_async(cm.file_exists(path)) if file_exists: raise web.HTTPError(400, "Cannot POST to files, use PUT instead.") dir_exists = await ensure_async(cm.dir_exists(path)) if not dir_exists: raise web.HTTPError(404, "No such directory: %s" % path) model = self.get_json_body() if model is not None: copy_from = model.get("copy_from") ext = model.get("ext", "") type = model.get("type", "") if copy_from: await self._copy(copy_from, path) else: await self._new_untitled(path, type=type, ext=ext) else: await self._new_untitled(path) @web.authenticated async def put(self, path=""): """Saves the file in the location specified by name and path. 
PUT is very similar to POST, but the requester specifies the name, whereas with POST, the server picks the name. PUT /api/contents/path/Name.ipynb Save notebook at ``path/Name.ipynb``. Notebook structure is specified in `content` key of JSON request body. If content is not specified, create a new empty notebook. """ model = self.get_json_body() if model: if model.get("copy_from"): raise web.HTTPError(400, "Cannot copy with PUT, only POST") exists = await ensure_async(self.contents_manager.file_exists(path)) if exists: await self._save(model, path) else: await self._upload(model, path) else: await self._new_untitled(path) @web.authenticated async def delete(self, path=""): """delete a file in the given path""" cm = self.contents_manager self.log.warning("delete %s", path) await ensure_async(cm.delete(path)) self.set_status(204) self.finish() class CheckpointsHandler(APIHandler): @web.authenticated async def get(self, path=""): """get lists checkpoints for a file""" cm = self.contents_manager checkpoints = await ensure_async(cm.list_checkpoints(path)) data = json.dumps(checkpoints, default=json_default) self.finish(data) @web.authenticated async def post(self, path=""): """post creates a new checkpoint""" cm = self.contents_manager checkpoint = await ensure_async(cm.create_checkpoint(path)) data = json.dumps(checkpoint, default=json_default) location = url_path_join( self.base_url, "api/contents", url_escape(path), "checkpoints", url_escape(checkpoint["id"]), ) self.set_header("Location", location) self.set_status(201) self.finish(data) class ModifyCheckpointsHandler(APIHandler): @web.authenticated async def post(self, path, checkpoint_id): """post restores a file from a checkpoint""" cm = self.contents_manager await ensure_async(cm.restore_checkpoint(checkpoint_id, path)) self.set_status(204) self.finish() @web.authenticated async def delete(self, path, checkpoint_id): """delete clears a checkpoint for a given file""" cm = self.contents_manager await ensure_async(cm.delete_checkpoint(checkpoint_id, path)) self.set_status(204) self.finish() class NotebooksRedirectHandler(JupyterHandler): """Redirect /api/notebooks to /api/contents""" SUPPORTED_METHODS = ("GET", "PUT", "PATCH", "POST", "DELETE") def get(self, path): self.log.warning("/api/notebooks is deprecated, use /api/contents") self.redirect(url_path_join(self.base_url, "api/contents", url_escape(path))) put = patch = post = delete = get class TrustNotebooksHandler(JupyterHandler): """ Handles trust/signing of notebooks """ @web.authenticated async def post(self, path=""): cm = self.contents_manager await ensure_async(cm.trust_notebook(path)) self.set_status(201) self.finish() # ----------------------------------------------------------------------------- # URL to handler mappings # ----------------------------------------------------------------------------- _checkpoint_id_regex = r"(?P<checkpoint_id>[\w-]+)" default_handlers = [ (r"/api/contents%s/checkpoints" % path_regex, CheckpointsHandler), ( r"/api/contents%s/checkpoints/%s" % (path_regex, _checkpoint_id_regex), ModifyCheckpointsHandler, ), (r"/api/contents%s/trust" % path_regex, TrustNotebooksHandler), (r"/api/contents%s" % path_regex, ContentsHandler), (r"/api/notebooks/?(.*)", NotebooksRedirectHandler), ] jupyter_server-1.13.1/jupyter_server/services/contents/largefilemanager.py000066400000000000000000000134501415445537200273110ustar00rootroot00000000000000import base64 import io import os from anyio.to_thread import run_sync from tornado import web from
jupyter_server.services.contents.filemanager import AsyncFileContentsManager from jupyter_server.services.contents.filemanager import FileContentsManager class LargeFileManager(FileContentsManager): """Handle large file upload.""" def save(self, model, path=""): """Save the file model and return the model with no content.""" chunk = model.get("chunk", None) if chunk is not None: path = path.strip("/") if "type" not in model: raise web.HTTPError(400, u"No file type provided") if model["type"] != "file": raise web.HTTPError( 400, u'File type "{}" is not supported for large file transfer'.format( model["type"] ), ) if "content" not in model and model["type"] != "directory": raise web.HTTPError(400, u"No file content provided") os_path = self._get_os_path(path) try: if chunk == 1: self.log.debug("Saving %s", os_path) self.run_pre_save_hook(model=model, path=path) super(LargeFileManager, self)._save_file( os_path, model["content"], model.get("format") ) else: self._save_large_file(os_path, model["content"], model.get("format")) except web.HTTPError: raise except Exception as e: self.log.error(u"Error while saving file: %s %s", path, e, exc_info=True) raise web.HTTPError( 500, u"Unexpected error while saving file: %s %s" % (path, e) ) from e model = self.get(path, content=False) # Last chunk if chunk == -1: self.run_post_save_hook(model=model, os_path=os_path) return model else: return super(LargeFileManager, self).save(model, path) def _save_large_file(self, os_path, content, format): """Save content of a generic file.""" if format not in {"text", "base64"}: raise web.HTTPError( 400, "Must specify format of file contents as 'text' or 'base64'", ) try: if format == "text": bcontent = content.encode("utf8") else: b64_bytes = content.encode("ascii") bcontent = base64.b64decode(b64_bytes) except Exception as e: raise web.HTTPError(400, u"Encoding error saving %s: %s" % (os_path, e)) from e with self.perm_to_403(os_path): if os.path.islink(os_path): os_path = os.path.join(os.path.dirname(os_path), os.readlink(os_path)) with io.open(os_path, "ab") as f: f.write(bcontent) class AsyncLargeFileManager(AsyncFileContentsManager): """Handle large file upload asynchronously""" async def save(self, model, path=""): """Save the file model and return the model with no content.""" chunk = model.get("chunk", None) if chunk is not None: path = path.strip("/") if "type" not in model: raise web.HTTPError(400, u"No file type provided") if model["type"] != "file": raise web.HTTPError( 400, u'File type "{}" is not supported for large file transfer'.format( model["type"] ), ) if "content" not in model and model["type"] != "directory": raise web.HTTPError(400, u"No file content provided") os_path = self._get_os_path(path) try: if chunk == 1: self.log.debug("Saving %s", os_path) self.run_pre_save_hook(model=model, path=path) await super(AsyncLargeFileManager, self)._save_file( os_path, model["content"], model.get("format") ) else: await self._save_large_file(os_path, model["content"], model.get("format")) except web.HTTPError: raise except Exception as e: self.log.error(u"Error while saving file: %s %s", path, e, exc_info=True) raise web.HTTPError( 500, u"Unexpected error while saving file: %s %s" % (path, e) ) from e model = await self.get(path, content=False) # Last chunk if chunk == -1: self.run_post_save_hook(model=model, os_path=os_path) return model else: return await super(AsyncLargeFileManager, self).save(model, path) async def _save_large_file(self, os_path, content, format): """Save content of a generic 
file.""" if format not in {"text", "base64"}: raise web.HTTPError( 400, "Must specify format of file contents as 'text' or 'base64'", ) try: if format == "text": bcontent = content.encode("utf8") else: b64_bytes = content.encode("ascii") bcontent = base64.b64decode(b64_bytes) except Exception as e: raise web.HTTPError(400, u"Encoding error saving %s: %s" % (os_path, e)) from e with self.perm_to_403(os_path): if os.path.islink(os_path): os_path = os.path.join(os.path.dirname(os_path), os.readlink(os_path)) with io.open(os_path, "ab") as f: await run_sync(f.write, bcontent) jupyter_server-1.13.1/jupyter_server/services/contents/manager.py000066400000000000000000000660701415445537200254440ustar00rootroot00000000000000"""A base class for contents managers.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import itertools import json import re from fnmatch import fnmatch from ipython_genutils.importstring import import_item from nbformat import sign from nbformat import validate as validate_nb from nbformat import ValidationError from nbformat.v4 import new_notebook from tornado.web import HTTPError from tornado.web import RequestHandler from traitlets import Any from traitlets import Bool from traitlets import default from traitlets import Dict from traitlets import Instance from traitlets import List from traitlets import TraitError from traitlets import Type from traitlets import Unicode from traitlets import validate from traitlets.config.configurable import LoggingConfigurable from ...files.handlers import FilesHandler from .checkpoints import AsyncCheckpoints from .checkpoints import Checkpoints from jupyter_server.transutils import _i18n from jupyter_server.utils import ensure_async copy_pat = re.compile(r"\-Copy\d*\.") class ContentsManager(LoggingConfigurable): """Base class for serving files and directories. This serves any text or binary file, as well as directories, with special handling for JSON notebook documents. Most APIs take a path argument, which is always an API-style unicode path, and always refers to a directory. - unicode, not url-escaped - '/'-separated - leading and trailing '/' will be stripped - if unspecified, path defaults to '', indicating the root path. """ root_dir = Unicode("/", config=True) allow_hidden = Bool(False, config=True, help="Allow access to hidden files") notary = Instance(sign.NotebookNotary) def _notary_default(self): return sign.NotebookNotary(parent=self) hide_globs = List( Unicode(), [ u"__pycache__", "*.pyc", "*.pyo", ".DS_Store", "*.so", "*.dylib", "*~", ], config=True, help=""" Glob patterns to hide in file and directory listings. """, ) untitled_notebook = Unicode( _i18n("Untitled"), config=True, help="The base name used when creating untitled notebooks." ) untitled_file = Unicode( "untitled", config=True, help="The base name used when creating untitled files." ) untitled_directory = Unicode( "Untitled Folder", config=True, help="The base name used when creating untitled directories.", ) pre_save_hook = Any( None, config=True, allow_none=True, help="""Python callable or importstring thereof To be called on a contents model prior to save. This can be used to process the structure, such as removing notebook outputs or other side effects that should not be saved. It will be called as (all arguments passed by keyword):: hook(path=path, model=model, contents_manager=self) - model: the model to be saved. Includes file contents. Modifying this dict will affect the file that is stored. 
- path: the API path of the save destination - contents_manager: this ContentsManager instance """, ) @validate("pre_save_hook") def _validate_pre_save_hook(self, proposal): value = proposal["value"] if isinstance(value, str): value = import_item(value) if not callable(value): raise TraitError("pre_save_hook must be callable") return value def run_pre_save_hook(self, model, path, **kwargs): """Run the pre-save hook if defined, and log errors""" if self.pre_save_hook: try: self.log.debug("Running pre-save hook on %s", path) self.pre_save_hook(model=model, path=path, contents_manager=self, **kwargs) except HTTPError: # allow custom HTTPErrors to raise, # rejecting the save with a message. raise except Exception: # unhandled errors don't prevent saving, # which could cause frustrating data loss self.log.error("Pre-save hook failed on %s", path, exc_info=True) checkpoints_class = Type(Checkpoints, config=True) checkpoints = Instance(Checkpoints, config=True) checkpoints_kwargs = Dict(config=True) @default("checkpoints") def _default_checkpoints(self): return self.checkpoints_class(**self.checkpoints_kwargs) @default("checkpoints_kwargs") def _default_checkpoints_kwargs(self): return dict( parent=self, log=self.log, ) files_handler_class = Type( FilesHandler, klass=RequestHandler, allow_none=True, config=True, help="""handler class to use when serving raw file requests. Default is a fallback that talks to the ContentsManager API, which may be inefficient, especially for large files. Local files-based ContentsManagers can use a StaticFileHandler subclass, which will be much more efficient. Access to these files should be Authenticated. """, ) files_handler_params = Dict( config=True, help="""Extra parameters to pass to files_handler_class. For example, StaticFileHandlers generally expect a `path` argument specifying the root directory from which to serve files. """, ) def get_extra_handlers(self): """Return additional handlers Default: self.files_handler_class on /files/.* """ handlers = [] if self.files_handler_class: handlers.append((r"/files/(.*)", self.files_handler_class, self.files_handler_params)) return handlers # ContentsManager API part 1: methods that must be # implemented in subclasses. def dir_exists(self, path): """Does a directory exist at the given path? Like os.path.isdir Override this method in subclasses. Parameters ---------- path : string The path to check Returns ------- exists : bool Whether the path does indeed exist. """ raise NotImplementedError def is_hidden(self, path): """Is path a hidden directory or file? Parameters ---------- path : string The path to check. This is an API path (`/` separated, relative to root dir). Returns ------- hidden : bool Whether the path is hidden. """ raise NotImplementedError def file_exists(self, path=""): """Does a file exist at the given path? Like os.path.isfile Override this method in subclasses. Parameters ---------- path : string The API path of a file to check for. Returns ------- exists : bool Whether the file exists. """ raise NotImplementedError("must be implemented in a subclass") def exists(self, path): """Does a file or directory exist at the given path? Like os.path.exists Parameters ---------- path : string The API path of a file or directory to check for. Returns ------- exists : bool Whether the target exists.
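The default implementation simply combines ``file_exists`` and ``dir_exists``, so subclasses normally only need to override those two methods; for example, ``exists("notebooks/analysis.ipynb")`` is true whenever either check passes.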
""" return self.file_exists(path) or self.dir_exists(path) def get(self, path, content=True, type=None, format=None): """Get a file or directory model.""" raise NotImplementedError("must be implemented in a subclass") def save(self, model, path): """ Save a file or directory model to path. Should return the saved model with no content. Save implementations should call self.run_pre_save_hook(model=model, path=path) prior to writing any data. """ raise NotImplementedError("must be implemented in a subclass") def delete_file(self, path): """Delete the file or directory at path.""" raise NotImplementedError("must be implemented in a subclass") def rename_file(self, old_path, new_path): """Rename a file or directory.""" raise NotImplementedError("must be implemented in a subclass") # ContentsManager API part 2: methods that have useable default # implementations, but can be overridden in subclasses. def delete(self, path): """Delete a file/directory and any associated checkpoints.""" path = path.strip("/") if not path: raise HTTPError(400, "Can't delete root") self.delete_file(path) self.checkpoints.delete_all_checkpoints(path) def rename(self, old_path, new_path): """Rename a file and any checkpoints associated with that file.""" self.rename_file(old_path, new_path) self.checkpoints.rename_all_checkpoints(old_path, new_path) def update(self, model, path): """Update the file's path For use in PATCH requests, to enable renaming a file without re-uploading its contents. Only used for renaming at the moment. """ path = path.strip("/") new_path = model.get("path", path).strip("/") if path != new_path: self.rename(path, new_path) model = self.get(new_path, content=False) return model def info_string(self): return "Serving contents" def get_kernel_path(self, path, model=None): """Return the API path for the kernel KernelManagers can turn this value into a filesystem path, or ignore it altogether. The default value here will start kernels in the directory of the notebook server. FileContentsManager overrides this to use the directory containing the notebook. """ return "" def increment_filename(self, filename, path="", insert=""): """Increment a filename until it is unique. Parameters ---------- filename : unicode The name of a file, including extension path : unicode The API path of the target's directory insert : unicode The characters to insert after the base filename Returns ------- name : unicode A filename that is unique, based on the input filename. """ # Extract the full suffix from the filename (e.g. .tar.gz) path = path.strip("/") basename, dot, ext = filename.rpartition(".") if ext != "ipynb": basename, dot, ext = filename.partition(".") suffix = dot + ext for i in itertools.count(): if i: insert_i = "{}{}".format(insert, i) else: insert_i = "" name = u"{basename}{insert}{suffix}".format( basename=basename, insert=insert_i, suffix=suffix ) if not self.exists(u"{}/{}".format(path, name)): break return name def validate_notebook_model(self, model): """Add failed-validation message to model""" try: validate_nb(model["content"]) except ValidationError as e: model["message"] = u"Notebook validation failed: {}:\n{}".format( e.message, json.dumps(e.instance, indent=1, default=lambda obj: ""), ) return model def new_untitled(self, path="", type="", ext=""): """Create a new untitled file or directory in path path must be a directory File extension can be specified. Use `new` to create files with a fully specified path (including filename). 
""" path = path.strip("/") if not self.dir_exists(path): raise HTTPError(404, "No such directory: %s" % path) model = {} if type: model["type"] = type if ext == ".ipynb": model.setdefault("type", "notebook") else: model.setdefault("type", "file") insert = "" if model["type"] == "directory": untitled = self.untitled_directory insert = " " elif model["type"] == "notebook": untitled = self.untitled_notebook ext = ".ipynb" elif model["type"] == "file": untitled = self.untitled_file else: raise HTTPError(400, "Unexpected model type: %r" % model["type"]) name = self.increment_filename(untitled + ext, path, insert=insert) path = u"{0}/{1}".format(path, name) return self.new(model, path) def new(self, model=None, path=""): """Create a new file or directory and return its model with no content. To create a new untitled entity in a directory, use `new_untitled`. """ path = path.strip("/") if model is None: model = {} if path.endswith(".ipynb"): model.setdefault("type", "notebook") else: model.setdefault("type", "file") # no content, not a directory, so fill out new-file model if "content" not in model and model["type"] != "directory": if model["type"] == "notebook": model["content"] = new_notebook() model["format"] = "json" else: model["content"] = "" model["type"] = "file" model["format"] = "text" model = self.save(model, path) return model def copy(self, from_path, to_path=None): """Copy an existing file and return its new model. If to_path not specified, it will be the parent directory of from_path. If to_path is a directory, filename will increment `from_path-Copy#.ext`. Considering multi-part extensions, the Copy# part will be placed before the first dot for all the extensions except `ipynb`. For easier manual searching in case of notebooks, the Copy# part will be placed before the last dot. from_path must be a full path to a file. """ path = from_path.strip("/") if to_path is not None: to_path = to_path.strip("/") if "/" in path: from_dir, from_name = path.rsplit("/", 1) else: from_dir = "" from_name = path model = self.get(path) model.pop("path", None) model.pop("name", None) if model["type"] == "directory": raise HTTPError(400, "Can't copy directories") if to_path is None: to_path = from_dir if self.dir_exists(to_path): name = copy_pat.sub(u".", from_name) to_name = self.increment_filename(name, to_path, insert="-Copy") to_path = u"{0}/{1}".format(to_path, to_name) model = self.save(model, to_path) return model def log_info(self): self.log.info(self.info_string()) def trust_notebook(self, path): """Explicitly trust a notebook Parameters ---------- path : string The path of a notebook """ model = self.get(path) nb = model["content"] self.log.warning("Trusting notebook %s", path) self.notary.mark_cells(nb, True) self.check_and_sign(nb, path) def check_and_sign(self, nb, path=""): """Check for trusted cells, and sign the notebook. Called as a part of saving notebooks. Parameters ---------- nb : dict The notebook dict path : string The notebook's path (for logging) """ if self.notary.check_cells(nb): self.notary.sign(nb) else: self.log.warning("Notebook %s is not trusted", path) def mark_trusted_cells(self, nb, path=""): """Mark cells as trusted if the notebook signature matches. Called as a part of loading notebooks. 
Parameters ---------- nb : dict The notebook object (in current nbformat) path : string The notebook's path (for logging) """ trusted = self.notary.check_signature(nb) if not trusted: self.log.warning("Notebook %s is not trusted", path) self.notary.mark_cells(nb, trusted) def should_list(self, name): """Should this file/directory name be displayed in a listing?""" return not any(fnmatch(name, glob) for glob in self.hide_globs) # Part 3: Checkpoints API def create_checkpoint(self, path): """Create a checkpoint.""" return self.checkpoints.create_checkpoint(self, path) def restore_checkpoint(self, checkpoint_id, path): """ Restore a checkpoint. """ self.checkpoints.restore_checkpoint(self, checkpoint_id, path) def list_checkpoints(self, path): return self.checkpoints.list_checkpoints(path) def delete_checkpoint(self, checkpoint_id, path): return self.checkpoints.delete_checkpoint(checkpoint_id, path) class AsyncContentsManager(ContentsManager): """Base class for serving files and directories asynchronously.""" checkpoints_class = Type(AsyncCheckpoints, config=True) checkpoints = Instance(AsyncCheckpoints, config=True) checkpoints_kwargs = Dict(config=True) @default("checkpoints") def _default_checkpoints(self): return self.checkpoints_class(**self.checkpoints_kwargs) @default("checkpoints_kwargs") def _default_checkpoints_kwargs(self): return dict( parent=self, log=self.log, ) # ContentsManager API part 1: methods that must be # implemented in subclasses. async def dir_exists(self, path): """Does a directory exist at the given path? Like os.path.isdir Override this method in subclasses. Parameters ---------- path : string The path to check Returns ------- exists : bool Whether the path does indeed exist. """ raise NotImplementedError async def is_hidden(self, path): """Is path a hidden directory or file? Parameters ---------- path : string The path to check. This is an API path (`/` separated, relative to root dir). Returns ------- hidden : bool Whether the path is hidden. """ raise NotImplementedError async def file_exists(self, path=""): """Does a file exist at the given path? Like os.path.isfile Override this method in subclasses. Parameters ---------- path : string The API path of a file to check for. Returns ------- exists : bool Whether the file exists. """ raise NotImplementedError("must be implemented in a subclass") async def exists(self, path): """Does a file or directory exist at the given path? Like os.path.exists Parameters ---------- path : string The API path of a file or directory to check for. Returns ------- exists : bool Whether the target exists. """ return await ensure_async(self.file_exists(path)) or await ensure_async( self.dir_exists(path) ) async def get(self, path, content=True, type=None, format=None): """Get a file or directory model.""" raise NotImplementedError("must be implemented in a subclass") async def save(self, model, path): """ Save a file or directory model to path. Should return the saved model with no content. Save implementations should call self.run_pre_save_hook(model=model, path=path) prior to writing any data. 
""" raise NotImplementedError("must be implemented in a subclass") async def delete_file(self, path): """Delete the file or directory at path.""" raise NotImplementedError("must be implemented in a subclass") async def rename_file(self, old_path, new_path): """Rename a file or directory.""" raise NotImplementedError("must be implemented in a subclass") # ContentsManager API part 2: methods that have useable default # implementations, but can be overridden in subclasses. async def delete(self, path): """Delete a file/directory and any associated checkpoints.""" path = path.strip("/") if not path: raise HTTPError(400, "Can't delete root") await self.delete_file(path) await self.checkpoints.delete_all_checkpoints(path) async def rename(self, old_path, new_path): """Rename a file and any checkpoints associated with that file.""" await self.rename_file(old_path, new_path) await self.checkpoints.rename_all_checkpoints(old_path, new_path) async def update(self, model, path): """Update the file's path For use in PATCH requests, to enable renaming a file without re-uploading its contents. Only used for renaming at the moment. """ path = path.strip("/") new_path = model.get("path", path).strip("/") if path != new_path: await self.rename(path, new_path) model = await self.get(new_path, content=False) return model async def increment_filename(self, filename, path="", insert=""): """Increment a filename until it is unique. Parameters ---------- filename : unicode The name of a file, including extension path : unicode The API path of the target's directory insert : unicode The characters to insert after the base filename Returns ------- name : unicode A filename that is unique, based on the input filename. """ # Extract the full suffix from the filename (e.g. .tar.gz) path = path.strip("/") basename, dot, ext = filename.rpartition(".") if ext != "ipynb": basename, dot, ext = filename.partition(".") suffix = dot + ext for i in itertools.count(): if i: insert_i = "{}{}".format(insert, i) else: insert_i = "" name = u"{basename}{insert}{suffix}".format( basename=basename, insert=insert_i, suffix=suffix ) file_exists = await ensure_async(self.exists(u"{}/{}".format(path, name))) if not file_exists: break return name async def new_untitled(self, path="", type="", ext=""): """Create a new untitled file or directory in path path must be a directory File extension can be specified. Use `new` to create files with a fully specified path (including filename). """ path = path.strip("/") dir_exists = await ensure_async(self.dir_exists(path)) if not dir_exists: raise HTTPError(404, "No such directory: %s" % path) model = {} if type: model["type"] = type if ext == ".ipynb": model.setdefault("type", "notebook") else: model.setdefault("type", "file") insert = "" if model["type"] == "directory": untitled = self.untitled_directory insert = " " elif model["type"] == "notebook": untitled = self.untitled_notebook ext = ".ipynb" elif model["type"] == "file": untitled = self.untitled_file else: raise HTTPError(400, "Unexpected model type: %r" % model["type"]) name = await self.increment_filename(untitled + ext, path, insert=insert) path = u"{0}/{1}".format(path, name) return await self.new(model, path) async def new(self, model=None, path=""): """Create a new file or directory and return its model with no content. To create a new untitled entity in a directory, use `new_untitled`. 
""" path = path.strip("/") if model is None: model = {} if path.endswith(".ipynb"): model.setdefault("type", "notebook") else: model.setdefault("type", "file") # no content, not a directory, so fill out new-file model if "content" not in model and model["type"] != "directory": if model["type"] == "notebook": model["content"] = new_notebook() model["format"] = "json" else: model["content"] = "" model["type"] = "file" model["format"] = "text" model = await self.save(model, path) return model async def copy(self, from_path, to_path=None): """Copy an existing file and return its new model. If to_path not specified, it will be the parent directory of from_path. If to_path is a directory, filename will increment `from_path-Copy#.ext`. Considering multi-part extensions, the Copy# part will be placed before the first dot for all the extensions except `ipynb`. For easier manual searching in case of notebooks, the Copy# part will be placed before the last dot. from_path must be a full path to a file. """ path = from_path.strip("/") if to_path is not None: to_path = to_path.strip("/") if "/" in path: from_dir, from_name = path.rsplit("/", 1) else: from_dir = "" from_name = path model = await self.get(path) model.pop("path", None) model.pop("name", None) if model["type"] == "directory": raise HTTPError(400, "Can't copy directories") if to_path is None: to_path = from_dir if await ensure_async(self.dir_exists(to_path)): name = copy_pat.sub(u".", from_name) to_name = await self.increment_filename(name, to_path, insert="-Copy") to_path = u"{0}/{1}".format(to_path, to_name) model = await self.save(model, to_path) return model async def trust_notebook(self, path): """Explicitly trust a notebook Parameters ---------- path : string The path of a notebook """ model = await self.get(path) nb = model["content"] self.log.warning("Trusting notebook %s", path) self.notary.mark_cells(nb, True) self.check_and_sign(nb, path) # Part 3: Checkpoints API async def create_checkpoint(self, path): """Create a checkpoint.""" return await self.checkpoints.create_checkpoint(self, path) async def restore_checkpoint(self, checkpoint_id, path): """ Restore a checkpoint. """ await self.checkpoints.restore_checkpoint(self, checkpoint_id, path) async def list_checkpoints(self, path): return await self.checkpoints.list_checkpoints(path) async def delete_checkpoint(self, checkpoint_id, path): return await self.checkpoints.delete_checkpoint(checkpoint_id, path) jupyter_server-1.13.1/jupyter_server/services/kernels/000077500000000000000000000000001415445537200232555ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/services/kernels/__init__.py000066400000000000000000000000001415445537200253540ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/services/kernels/handlers.py000066400000000000000000000674701415445537200254450ustar00rootroot00000000000000"""Tornado handlers for kernels. Preliminary documentation at https://github.com/ipython/ipython/wiki/IPEP-16%3A-Notebook-multi-directory-dashboard-and-URL-mapping#kernels-api """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
import json from textwrap import dedent from traceback import format_tb from ipython_genutils.py3compat import cast_unicode from jupyter_client import protocol_version as client_protocol_version try: from jupyter_client.jsonutil import json_default except ImportError: from jupyter_client.jsonutil import date_default as json_default from tornado import gen from tornado import web from tornado.concurrent import Future from tornado.ioloop import IOLoop from ...base.handlers import APIHandler from ...base.zmqhandlers import AuthenticatedZMQStreamHandler from ...base.zmqhandlers import deserialize_binary_message from jupyter_server.utils import ensure_async from jupyter_server.utils import url_escape from jupyter_server.utils import url_path_join class MainKernelHandler(APIHandler): @web.authenticated async def get(self): km = self.kernel_manager kernels = await ensure_async(km.list_kernels()) self.finish(json.dumps(kernels, default=json_default)) @web.authenticated async def post(self): km = self.kernel_manager model = self.get_json_body() if model is None: model = {"name": km.default_kernel_name} else: model.setdefault("name", km.default_kernel_name) kernel_id = await km.start_kernel(kernel_name=model["name"], path=model.get("path")) model = await ensure_async(km.kernel_model(kernel_id)) location = url_path_join(self.base_url, "api", "kernels", url_escape(kernel_id)) self.set_header("Location", location) self.set_status(201) self.finish(json.dumps(model, default=json_default)) class KernelHandler(APIHandler): @web.authenticated async def get(self, kernel_id): km = self.kernel_manager model = await ensure_async(km.kernel_model(kernel_id)) self.finish(json.dumps(model, default=json_default)) @web.authenticated async def delete(self, kernel_id): km = self.kernel_manager await ensure_async(km.shutdown_kernel(kernel_id)) self.set_status(204) self.finish() class KernelActionHandler(APIHandler): @web.authenticated async def post(self, kernel_id, action): km = self.kernel_manager if action == "interrupt": await ensure_async(km.interrupt_kernel(kernel_id)) self.set_status(204) if action == "restart": try: await km.restart_kernel(kernel_id) except Exception as e: message = "Exception restarting kernel" self.log.error(message, exc_info=True) traceback = format_tb(e.__traceback__) self.write(json.dumps(dict(message=message, traceback=traceback))) self.set_status(500) else: model = await ensure_async(km.kernel_model(kernel_id)) self.write(json.dumps(model, default=json_default)) self.finish() class ZMQChannelsHandler(AuthenticatedZMQStreamHandler): """There is one ZMQChannelsHandler per running kernel and it oversees all the sessions. """ # class-level registry of open sessions # allows checking for conflict on session-id, # which is used as a zmq identity and must be unique. 
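    # Maps session keys of the form "<kernel_id>:<session_id>" (built in
    # _register_session below) to the ZMQChannelsHandler that owns the session.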
_open_sessions = {} @property def kernel_info_timeout(self): km_default = self.kernel_manager.kernel_info_timeout return self.settings.get("kernel_info_timeout", km_default) @property def iopub_msg_rate_limit(self): return self.settings.get("iopub_msg_rate_limit", 0) @property def iopub_data_rate_limit(self): return self.settings.get("iopub_data_rate_limit", 0) @property def rate_limit_window(self): return self.settings.get("rate_limit_window", 1.0) def __repr__(self): return "%s(%s)" % (self.__class__.__name__, getattr(self, "kernel_id", "uninitialized")) def create_stream(self): km = self.kernel_manager identity = self.session.bsession for channel in ("iopub", "shell", "control", "stdin"): meth = getattr(km, "connect_" + channel) self.channels[channel] = stream = meth(self.kernel_id, identity=identity) stream.channel = channel def nudge(self): """Nudge the zmq connections with kernel_info_requests Returns a Future that will resolve when we have received a shell or control reply and at least one iopub message, ensuring that zmq subscriptions are established, sockets are fully connected, and kernel is responsive. Keeps retrying kernel_info_request until these are both received. """ kernel = self.kernel_manager.get_kernel(self.kernel_id) # Do not nudge busy kernels as kernel info requests sent to shell are # queued behind execution requests. # nudging in this case would cause a potentially very long wait # before connections are opened, # plus it is *very* unlikely that a busy kernel will not finish # establishing its zmq subscriptions before processing the next request. if getattr(kernel, "execution_state") == "busy": self.log.debug("Nudge: not nudging busy kernel %s", self.kernel_id) f = Future() f.set_result(None) return f # Use a transient shell channel to prevent leaking # shell responses to the front-end. shell_channel = kernel.connect_shell() # Use a transient control channel to prevent leaking # control responses to the front-end. control_channel = kernel.connect_control() # The IOPub used by the client, whose subscriptions we are verifying. 
iopub_channel = self.channels["iopub"] info_future = Future() iopub_future = Future() both_done = gen.multi([info_future, iopub_future]) def finish(_=None): """Ensure all futures are resolved which in turn triggers cleanup """ for f in (info_future, iopub_future): if not f.done(): f.set_result(None) def cleanup(_=None): """Common cleanup""" loop.remove_timeout(nudge_handle) iopub_channel.stop_on_recv() if not shell_channel.closed(): shell_channel.close() if not control_channel.closed(): control_channel.close() # trigger cleanup when both message futures are resolved both_done.add_done_callback(cleanup) def on_shell_reply(msg): self.log.debug("Nudge: shell info reply received: %s", self.kernel_id) if not info_future.done(): self.log.debug("Nudge: resolving shell future: %s", self.kernel_id) info_future.set_result(None) def on_control_reply(msg): self.log.debug("Nudge: control info reply received: %s", self.kernel_id) if not info_future.done(): self.log.debug("Nudge: resolving control future: %s", self.kernel_id) info_future.set_result(None) def on_iopub(msg): self.log.debug("Nudge: IOPub received: %s", self.kernel_id) if not iopub_future.done(): iopub_channel.stop_on_recv() self.log.debug("Nudge: resolving iopub future: %s", self.kernel_id) iopub_future.set_result(None) iopub_channel.on_recv(on_iopub) shell_channel.on_recv(on_shell_reply) control_channel.on_recv(on_control_reply) loop = IOLoop.current() # Nudge the kernel with kernel info requests until we get an IOPub message def nudge(count): count += 1 # NOTE: this close check appears to never be True during on_open, # even when the peer has closed the connection if self.ws_connection is None or self.ws_connection.is_closing(): self.log.debug("Nudge: cancelling on closed websocket: %s", self.kernel_id) finish() return # check for stopped kernel if self.kernel_id not in self.kernel_manager: self.log.debug("Nudge: cancelling on stopped kernel: %s", self.kernel_id) finish() return # check for closed zmq socket if shell_channel.closed(): self.log.debug("Nudge: cancelling on closed zmq socket: %s", self.kernel_id) finish() return # check for closed zmq socket if control_channel.closed(): self.log.debug("Nudge: cancelling on closed zmq socket: %s", self.kernel_id) finish() return if not both_done.done(): log = self.log.warning if count % 10 == 0 else self.log.debug log("Nudge: attempt %s on kernel %s" % (count, self.kernel_id)) self.session.send(shell_channel, "kernel_info_request") self.session.send(control_channel, "kernel_info_request") nonlocal nudge_handle nudge_handle = loop.call_later(0.5, nudge, count) nudge_handle = loop.call_later(0, nudge, count=0) # resolve with a timeout if we get no response future = gen.with_timeout(loop.time() + self.kernel_info_timeout, both_done) # ensure we have no dangling resources or unresolved Futures in case of timeout future.add_done_callback(finish) return future def request_kernel_info(self): """send a request for kernel_info""" km = self.kernel_manager kernel = km.get_kernel(self.kernel_id) try: # check for previous request future = kernel._kernel_info_future except AttributeError: self.log.debug("Requesting kernel info from %s", self.kernel_id) # Create a kernel_info channel to query the kernel protocol version. # This channel will be closed after the kernel_info reply is received. 
            if self.kernel_info_channel is None:
                self.kernel_info_channel = km.connect_shell(self.kernel_id)
            self.kernel_info_channel.on_recv(self._handle_kernel_info_reply)
            self.session.send(self.kernel_info_channel, "kernel_info_request")
            # store the future on the kernel, so only one request is sent
            kernel._kernel_info_future = self._kernel_info_future
        else:
            if not future.done():
                self.log.debug("Waiting for pending kernel_info request")
                future.add_done_callback(lambda f: self._finish_kernel_info(f.result()))
        return self._kernel_info_future

    def _handle_kernel_info_reply(self, msg):
        """process the kernel_info_reply

        enabling msg spec adaptation, if necessary
        """
        idents, msg = self.session.feed_identities(msg)
        try:
            msg = self.session.deserialize(msg)
        except Exception:
            self.log.error("Bad kernel_info reply", exc_info=True)
            self._kernel_info_future.set_result({})
            return
        else:
            info = msg["content"]
            self.log.debug("Received kernel info: %s", info)
            if msg["msg_type"] != "kernel_info_reply" or "protocol_version" not in info:
                self.log.error("Kernel info request failed, assuming current %s", info)
                info = {}
            self._finish_kernel_info(info)

        # close the kernel_info channel, we don't need it anymore
        if self.kernel_info_channel:
            self.kernel_info_channel.close()
        self.kernel_info_channel = None

    def _finish_kernel_info(self, info):
        """Finish handling kernel_info reply

        Set up protocol adaptation, if needed,
        and signal that connection can continue.
        """
        protocol_version = info.get("protocol_version", client_protocol_version)
        if protocol_version != client_protocol_version:
            self.session.adapt_version = int(protocol_version.split(".")[0])
            self.log.info(
                "Adapting from protocol version {protocol_version} (kernel {kernel_id}) to {client_protocol_version} (client).".format(
                    protocol_version=protocol_version,
                    kernel_id=self.kernel_id,
                    client_protocol_version=client_protocol_version,
                )
            )
        if not self._kernel_info_future.done():
            self._kernel_info_future.set_result(info)

    def initialize(self):
        super(ZMQChannelsHandler, self).initialize()
        self.zmq_stream = None
        self.channels = {}
        self.kernel_id = None
        self.kernel_info_channel = None
        self._kernel_info_future = Future()
        self._close_future = Future()
        self.session_key = ""

        # Rate limiting code
        self._iopub_window_msg_count = 0
        self._iopub_window_byte_count = 0
        self._iopub_msgs_exceeded = False
        self._iopub_data_exceeded = False
        # Queue of (time stamp, byte count)
        # Allows you to specify that the byte count should be lowered
        # by a delta amount at some point in the future.
        self._iopub_window_byte_queue = []

    async def pre_get(self):
        # authenticate first
        super(ZMQChannelsHandler, self).pre_get()
        # check session collision:
        await self._register_session()
        # then request kernel info, waiting up to a certain time before giving up.
        # We don't want to wait forever, because browsers don't take it well when
        # servers never respond to websocket connection requests.
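        # (The wait below is bounded by self.kernel_info_timeout, the property
        # defined near the top of this class; it falls back to
        # MappingKernelManager.kernel_info_timeout, 60 seconds by default.)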
kernel = self.kernel_manager.get_kernel(self.kernel_id) if hasattr(kernel, "ready"): try: await kernel.ready except Exception as e: kernel.execution_state = "dead" kernel.reason = str(e) raise web.HTTPError(500, str(e)) from e self.session.key = kernel.session.key future = self.request_kernel_info() def give_up(): """Don't wait forever for the kernel to reply""" if future.done(): return self.log.warning("Timeout waiting for kernel_info reply from %s", self.kernel_id) future.set_result({}) loop = IOLoop.current() loop.add_timeout(loop.time() + self.kernel_info_timeout, give_up) # actually wait for it await future async def get(self, kernel_id): self.kernel_id = cast_unicode(kernel_id, "ascii") await super(ZMQChannelsHandler, self).get(kernel_id=kernel_id) async def _register_session(self): """Ensure we aren't creating a duplicate session. If a previous identical session is still open, close it to avoid collisions. This is likely due to a client reconnecting from a lost network connection, where the socket on our side has not been cleaned up yet. """ self.session_key = "%s:%s" % (self.kernel_id, self.session.session) stale_handler = self._open_sessions.get(self.session_key) if stale_handler: self.log.warning("Replacing stale connection: %s", self.session_key) await stale_handler.close() self._open_sessions[self.session_key] = self def open(self, kernel_id): super(ZMQChannelsHandler, self).open() km = self.kernel_manager km.notify_connect(kernel_id) # on new connections, flush the message buffer buffer_info = km.get_buffer(kernel_id, self.session_key) if buffer_info and buffer_info["session_key"] == self.session_key: self.log.info("Restoring connection for %s", self.session_key) if km.ports_changed(kernel_id): # If the kernel's ports have changed (some restarts trigger this) # then reset the channels so nudge() is using the correct iopub channel self.create_stream() else: # The kernel's ports have not changed; use the channels captured in the buffer self.channels = buffer_info["channels"] connected = self.nudge() def replay(value): replay_buffer = buffer_info["buffer"] if replay_buffer: self.log.info("Replaying %s buffered messages", len(replay_buffer)) for channel, msg_list in replay_buffer: stream = self.channels[channel] self._on_zmq_reply(stream, msg_list) connected.add_done_callback(replay) else: try: self.create_stream() connected = self.nudge() except web.HTTPError as e: # Do not log error if the kernel is already shutdown, # as it's normal that it's not responding try: self.kernel_manager.get_kernel(kernel_id) self.log.error("Error opening stream: %s", e) except KeyError: pass # WebSockets don't respond to traditional error codes so we # close the connection. 
for channel, stream in self.channels.items(): if not stream.closed(): stream.close() self.close() return km.add_restart_callback(self.kernel_id, self.on_kernel_restarted) km.add_restart_callback(self.kernel_id, self.on_restart_failed, "dead") def subscribe(value): for channel, stream in self.channels.items(): stream.on_recv_stream(self._on_zmq_reply) connected.add_done_callback(subscribe) return connected def on_message(self, msg): if not self.channels: # already closed, ignore the message self.log.debug("Received message on closed websocket %r", msg) return if isinstance(msg, bytes): msg = deserialize_binary_message(msg) else: msg = json.loads(msg) channel = msg.pop("channel", None) if channel is None: self.log.warning("No channel specified, assuming shell: %s", msg) channel = "shell" if channel not in self.channels: self.log.warning("No such channel: %r", channel) return am = self.kernel_manager.allowed_message_types mt = msg["header"]["msg_type"] if am and mt not in am: self.log.warning('Received message of type "%s", which is not allowed. Ignoring.' % mt) else: stream = self.channels[channel] self.session.send(stream, msg) def _on_zmq_reply(self, stream, msg_list): idents, fed_msg_list = self.session.feed_identities(msg_list) msg = self.session.deserialize(fed_msg_list) parent = msg["parent_header"] def write_stderr(error_message): self.log.warning(error_message) msg = self.session.msg( "stream", content={"text": error_message + "\n", "name": "stderr"}, parent=parent ) msg["channel"] = "iopub" self.write_message(json.dumps(msg, default=json_default)) channel = getattr(stream, "channel", None) msg_type = msg["header"]["msg_type"] if channel == "iopub" and msg_type == "error": self._on_error(msg) if ( channel == "iopub" and msg_type == "status" and msg["content"].get("execution_state") == "idle" ): # reset rate limit counter on status=idle, # to avoid 'Run All' hitting limits prematurely. self._iopub_window_byte_queue = [] self._iopub_window_msg_count = 0 self._iopub_window_byte_count = 0 self._iopub_msgs_exceeded = False self._iopub_data_exceeded = False if channel == "iopub" and msg_type not in {"status", "comm_open", "execute_input"}: # Remove the counts queued for removal. now = IOLoop.current().time() while len(self._iopub_window_byte_queue) > 0: queued = self._iopub_window_byte_queue[0] if now >= queued[0]: self._iopub_window_byte_count -= queued[1] self._iopub_window_msg_count -= 1 del self._iopub_window_byte_queue[0] else: # This part of the queue hasn't be reached yet, so we can # abort the loop. break # Increment the bytes and message count self._iopub_window_msg_count += 1 if msg_type == "stream": byte_count = sum([len(x) for x in msg_list]) else: byte_count = 0 self._iopub_window_byte_count += byte_count # Queue a removal of the byte and message count for a time in the # future, when we are no longer interested in it. self._iopub_window_byte_queue.append((now + self.rate_limit_window, byte_count)) # Check the limits, set the limit flags, and reset the # message and data counts. msg_rate = float(self._iopub_window_msg_count) / self.rate_limit_window data_rate = float(self._iopub_window_byte_count) / self.rate_limit_window # Check the msg rate if self.iopub_msg_rate_limit > 0 and msg_rate > self.iopub_msg_rate_limit: if not self._iopub_msgs_exceeded: self._iopub_msgs_exceeded = True write_stderr( dedent( """\ IOPub message rate exceeded. The Jupyter server will temporarily stop sending output to the client in order to avoid crashing it. 
To change this limit, set the config variable `--ServerApp.iopub_msg_rate_limit`. Current values: ServerApp.iopub_msg_rate_limit={} (msgs/sec) ServerApp.rate_limit_window={} (secs) """.format( self.iopub_msg_rate_limit, self.rate_limit_window ) ) ) else: # resume once we've got some headroom below the limit if self._iopub_msgs_exceeded and msg_rate < (0.8 * self.iopub_msg_rate_limit): self._iopub_msgs_exceeded = False if not self._iopub_data_exceeded: self.log.warning("iopub messages resumed") # Check the data rate if self.iopub_data_rate_limit > 0 and data_rate > self.iopub_data_rate_limit: if not self._iopub_data_exceeded: self._iopub_data_exceeded = True write_stderr( dedent( """\ IOPub data rate exceeded. The Jupyter server will temporarily stop sending output to the client in order to avoid crashing it. To change this limit, set the config variable `--ServerApp.iopub_data_rate_limit`. Current values: ServerApp.iopub_data_rate_limit={} (bytes/sec) ServerApp.rate_limit_window={} (secs) """.format( self.iopub_data_rate_limit, self.rate_limit_window ) ) ) else: # resume once we've got some headroom below the limit if self._iopub_data_exceeded and data_rate < (0.8 * self.iopub_data_rate_limit): self._iopub_data_exceeded = False if not self._iopub_msgs_exceeded: self.log.warning("iopub messages resumed") # If either of the limit flags are set, do not send the message. if self._iopub_msgs_exceeded or self._iopub_data_exceeded: # we didn't send it, remove the current message from the calculus self._iopub_window_msg_count -= 1 self._iopub_window_byte_count -= byte_count self._iopub_window_byte_queue.pop(-1) return super(ZMQChannelsHandler, self)._on_zmq_reply(stream, msg) def close(self): super(ZMQChannelsHandler, self).close() return self._close_future def on_close(self): self.log.debug("Websocket closed %s", self.session_key) # unregister myself as an open session (only if it's really me) if self._open_sessions.get(self.session_key) is self: self._open_sessions.pop(self.session_key) km = self.kernel_manager if self.kernel_id in km: km.notify_disconnect(self.kernel_id) km.remove_restart_callback( self.kernel_id, self.on_kernel_restarted, ) km.remove_restart_callback( self.kernel_id, self.on_restart_failed, "dead", ) # start buffering instead of closing if this was the last connection if km._kernel_connections[self.kernel_id] == 0: km.start_buffering(self.kernel_id, self.session_key, self.channels) self._close_future.set_result(None) return # This method can be called twice, once by self.kernel_died and once # from the WebSocket close event. If the WebSocket connection is # closed before the ZMQ streams are setup, they could be None. 
        for channel, stream in self.channels.items():
            if stream is not None and not stream.closed():
                stream.on_recv(None)
                stream.close()

        self.channels = {}
        self._close_future.set_result(None)

    def _send_status_message(self, status):
        iopub = self.channels.get("iopub", None)
        if iopub and not iopub.closed():
            # flush IOPub before sending a restarting/dead status message
            # ensures proper ordering on the IOPub channel
            # that all messages from the stopped kernel have been delivered
            iopub.flush()
        msg = self.session.msg("status", {"execution_state": status})
        msg["channel"] = "iopub"
        self.write_message(json.dumps(msg, default=json_default))

    def on_kernel_restarted(self):
        self.log.warning("kernel %s restarted", self.kernel_id)
        self._send_status_message("restarting")

    def on_restart_failed(self):
        self.log.error("kernel %s restart failed!", self.kernel_id)
        self._send_status_message("dead")

    def _on_error(self, msg):
        if self.kernel_manager.allow_tracebacks:
            return
        msg["content"]["ename"] = "ExecutionError"
        msg["content"]["evalue"] = "Execution error"
        msg["content"]["traceback"] = [self.kernel_manager.traceback_replacement_message]


# -----------------------------------------------------------------------------
# URL to handler mappings
# -----------------------------------------------------------------------------


_kernel_id_regex = r"(?P<kernel_id>\w+-\w+-\w+-\w+-\w+)"
_kernel_action_regex = r"(?P<action>restart|interrupt)"

default_handlers = [
    (r"/api/kernels", MainKernelHandler),
    (r"/api/kernels/%s" % _kernel_id_regex, KernelHandler),
    (r"/api/kernels/%s/%s" % (_kernel_id_regex, _kernel_action_regex), KernelActionHandler),
    (r"/api/kernels/%s/channels" % _kernel_id_regex, ZMQChannelsHandler),
]
jupyter_server-1.13.1/jupyter_server/services/kernels/kernelmanager.py000066400000000000000000000622561415445537200264530ustar00rootroot00000000000000
"""A MultiKernelManager for use in the Jupyter server

- raises HTTPErrors
- creates REST API models
"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
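# An illustrative culling configuration (values are examples only), set e.g. in
# a jupyter_server_config.py; the traits are defined on MappingKernelManager below:
#
#   c.MappingKernelManager.cull_idle_timeout = 1200  # cull kernels idle for > 20 minutes
#   c.MappingKernelManager.cull_interval = 300       # poll for idle kernels every 5 minutes
#   c.MappingKernelManager.cull_connected = False    # spare kernels with open connections
#   c.MappingKernelManager.cull_busy = False         # spare kernels that are executing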
import asyncio import os from collections import defaultdict from datetime import datetime from datetime import timedelta from functools import partial from jupyter_client.multikernelmanager import AsyncMultiKernelManager from jupyter_client.multikernelmanager import MultiKernelManager from jupyter_client.session import Session from jupyter_core.paths import exists from tornado import web from tornado.concurrent import Future from tornado.ioloop import IOLoop from tornado.ioloop import PeriodicCallback from traitlets import Any from traitlets import Bool from traitlets import default from traitlets import Dict from traitlets import Float from traitlets import Instance from traitlets import Integer from traitlets import List from traitlets import TraitError from traitlets import Unicode from traitlets import validate from jupyter_server._tz import isoformat from jupyter_server._tz import utcnow from jupyter_server.prometheus.metrics import KERNEL_CURRENTLY_RUNNING_TOTAL from jupyter_server.utils import ensure_async from jupyter_server.utils import to_os_path class MappingKernelManager(MultiKernelManager): """A KernelManager that handles - File mapping - HTTP error handling - Kernel message filtering """ @default("kernel_manager_class") def _default_kernel_manager_class(self): return "jupyter_client.ioloop.IOLoopKernelManager" kernel_argv = List(Unicode()) root_dir = Unicode(config=True) _kernel_connections = Dict() _kernel_ports = Dict() _culler_callback = None _initialized_culler = False @default("root_dir") def _default_root_dir(self): try: return self.parent.root_dir except AttributeError: return os.getcwd() @validate("root_dir") def _update_root_dir(self, proposal): """Do a bit of validation of the root dir.""" value = proposal["value"] if not os.path.isabs(value): # If we receive a non-absolute path, make it absolute. value = os.path.abspath(value) if not exists(value) or not os.path.isdir(value): raise TraitError("kernel root dir %r is not a directory" % value) return value cull_idle_timeout = Integer( 0, config=True, help="""Timeout (in seconds) after which a kernel is considered idle and ready to be culled. Values of 0 or lower disable culling. Very short timeouts may result in kernels being culled for users with poor network connections.""", ) cull_interval_default = 300 # 5 minutes cull_interval = Integer( cull_interval_default, config=True, help="""The interval (in seconds) on which to check for idle kernels exceeding the cull timeout value.""", ) cull_connected = Bool( False, config=True, help="""Whether to consider culling kernels which have one or more connections. Only effective if cull_idle_timeout > 0.""", ) cull_busy = Bool( False, config=True, help="""Whether to consider culling kernels which are busy. Only effective if cull_idle_timeout > 0.""", ) buffer_offline_messages = Bool( True, config=True, help="""Whether messages from kernels whose frontends have disconnected should be buffered in-memory. When True (default), messages are buffered and replayed on reconnect, avoiding lost messages due to interrupted connectivity. Disable if long-running kernels will produce too much output while no frontends are connected. """, ) kernel_info_timeout = Float( 60, config=True, help="""Timeout for giving up on a kernel (in seconds). On starting and restarting kernels, we check whether the kernel is running and responsive by sending kernel_info_requests. This sets the timeout in seconds for how long the kernel can take before being presumed dead. 
This affects the MappingKernelManager (which handles kernel restarts) and the ZMQChannelsHandler (which handles the startup). """, ) _kernel_buffers = Any() @default("_kernel_buffers") def _default_kernel_buffers(self): return defaultdict(lambda: {"buffer": [], "session_key": "", "channels": {}}) last_kernel_activity = Instance( datetime, help="The last activity on any kernel, including shutting down a kernel" ) def __init__(self, **kwargs): self.pinned_superclass = MultiKernelManager self.pinned_superclass.__init__(self, **kwargs) self.last_kernel_activity = utcnow() allowed_message_types = List( trait=Unicode(), config=True, help="""White list of allowed kernel message types. When the list is empty, all message types are allowed. """, ) allow_tracebacks = Bool( True, config=True, help=("Whether to send tracebacks to clients on exceptions.") ) traceback_replacement_message = Unicode( "An exception occurred at runtime, which is not shown due to security reasons.", config=True, help=("Message to print when allow_tracebacks is False, and an exception occurs"), ) # ------------------------------------------------------------------------- # Methods for managing kernels and sessions # ------------------------------------------------------------------------- def _handle_kernel_died(self, kernel_id): """notice that a kernel died""" self.log.warning("Kernel %s died, removing from map.", kernel_id) self.remove_kernel(kernel_id) def cwd_for_path(self, path): """Turn API path into absolute OS path.""" os_path = to_os_path(path, self.root_dir) # in the case of documents and kernels not being on the same filesystem, # walk up to root_dir if the paths don't exist while not os.path.isdir(os_path) and os_path != self.root_dir: os_path = os.path.dirname(os_path) return os_path async def start_kernel(self, kernel_id=None, path=None, **kwargs): """Start a kernel for a session and return its kernel_id. Parameters ---------- kernel_id : uuid The uuid to associate the new kernel with. If this is not None, this kernel will be persistent whenever it is requested. path : API path The API path (unicode, '/' delimited) for the cwd. Will be transformed to an OS path relative to root_dir. kernel_name : str The name identifying which kernel spec to launch. This is ignored if an existing kernel is returned, but it may be checked in the future. 
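
        A typical call looks like the following (illustrative; ``kernel_name``
        is forwarded through ``**kwargs``)::

            kernel_id = await km.start_kernel(path="project", kernel_name="python3")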
""" if kernel_id is None or kernel_id not in self: if path is not None: kwargs["cwd"] = self.cwd_for_path(path) if kernel_id is not None: kwargs["kernel_id"] = kernel_id kernel_id = await ensure_async(self.pinned_superclass.start_kernel(self, **kwargs)) self._kernel_connections[kernel_id] = 0 fut = asyncio.ensure_future(self._finish_kernel_start(kernel_id)) if not getattr(self, "use_pending_kernels", None): await fut # add busy/activity markers: kernel = self.get_kernel(kernel_id) kernel.execution_state = "starting" kernel.reason = "" kernel.last_activity = utcnow() self.log.info("Kernel started: %s" % kernel_id) self.log.debug("Kernel args: %r" % kwargs) # Increase the metric of number of kernels running # for the relevant kernel type by 1 KERNEL_CURRENTLY_RUNNING_TOTAL.labels(type=self._kernels[kernel_id].kernel_name).inc() else: self.log.info("Using existing kernel: %s" % kernel_id) # Initialize culling if not already if not self._initialized_culler: self.initialize_culler() return kernel_id async def _finish_kernel_start(self, kernel_id): km = self.get_kernel(kernel_id) if hasattr(km, "ready"): try: await km.ready except Exception: self.log.exception(km.ready.exception()) return self._kernel_ports[kernel_id] = km.ports self.start_watching_activity(kernel_id) # register callback for failed auto-restart self.add_restart_callback( kernel_id, lambda: self._handle_kernel_died(kernel_id), "dead", ) def ports_changed(self, kernel_id): """Used by ZMQChannelsHandler to determine how to coordinate nudge and replays. Ports are captured when starting a kernel (via MappingKernelManager). Ports are considered changed (following restarts) if the referenced KernelManager is using a set of ports different from those captured at startup. If changes are detected, the captured set is updated and a value of True is returned. NOTE: Use is exclusive to ZMQChannelsHandler because this object is a singleton instance while ZMQChannelsHandler instances are per WebSocket connection that can vary per kernel lifetime. """ changed_ports = self._get_changed_ports(kernel_id) if changed_ports: # If changed, update captured ports and return True, else return False. self.log.debug(f"Port change detected for kernel: {kernel_id}") self._kernel_ports[kernel_id] = changed_ports return True return False def _get_changed_ports(self, kernel_id): """Internal method to test if a kernel's ports have changed and, if so, return their values. This method does NOT update the captured ports for the kernel as that can only be done by ZMQChannelsHandler, but instead returns the new list of ports if they are different than those captured at startup. This enables the ability to conditionally restart activity monitoring immediately following a kernel's restart (if ports have changed). """ # Get current ports and return comparison with ports captured at startup. km = self.get_kernel(kernel_id) if km.ports != self._kernel_ports[kernel_id]: return km.ports return None def start_buffering(self, kernel_id, session_key, channels): """Start buffering messages for a kernel Parameters ---------- kernel_id : str The id of the kernel to stop buffering. session_key : str The session_key, if any, that should get the buffer. If the session_key matches the current buffered session_key, the buffer will be returned. channels : dict({'channel': ZMQStream}) The zmq channels whose messages should be buffered. 
""" if not self.buffer_offline_messages: for channel, stream in channels.items(): stream.close() return self.log.info("Starting buffering for %s", session_key) self._check_kernel_id(kernel_id) # clear previous buffering state self.stop_buffering(kernel_id) buffer_info = self._kernel_buffers[kernel_id] # record the session key because only one session can buffer buffer_info["session_key"] = session_key # TODO: the buffer should likely be a memory bounded queue, we're starting with a list to keep it simple buffer_info["buffer"] = [] buffer_info["channels"] = channels # forward any future messages to the internal buffer def buffer_msg(channel, msg_parts): self.log.debug("Buffering msg on %s:%s", kernel_id, channel) buffer_info["buffer"].append((channel, msg_parts)) for channel, stream in channels.items(): stream.on_recv(partial(buffer_msg, channel)) def get_buffer(self, kernel_id, session_key): """Get the buffer for a given kernel Parameters ---------- kernel_id : str The id of the kernel to stop buffering. session_key : str, optional The session_key, if any, that should get the buffer. If the session_key matches the current buffered session_key, the buffer will be returned. """ self.log.debug("Getting buffer for %s", kernel_id) if kernel_id not in self._kernel_buffers: return buffer_info = self._kernel_buffers[kernel_id] if buffer_info["session_key"] == session_key: # remove buffer self._kernel_buffers.pop(kernel_id) # only return buffer_info if it's a match return buffer_info else: self.stop_buffering(kernel_id) def stop_buffering(self, kernel_id): """Stop buffering kernel messages Parameters ---------- kernel_id : str The id of the kernel to stop buffering. """ self.log.debug("Clearing buffer for %s", kernel_id) self._check_kernel_id(kernel_id) if kernel_id not in self._kernel_buffers: return buffer_info = self._kernel_buffers.pop(kernel_id) # close buffering streams for stream in buffer_info["channels"].values(): if not stream.closed(): stream.on_recv(None) stream.close() msg_buffer = buffer_info["buffer"] if msg_buffer: self.log.info( "Discarding %s buffered messages for %s", len(msg_buffer), buffer_info["session_key"], ) def shutdown_kernel(self, kernel_id, now=False, restart=False): """Shutdown a kernel by kernel_id""" self._check_kernel_id(kernel_id) self.stop_watching_activity(kernel_id) self.stop_buffering(kernel_id) self._kernel_connections.pop(kernel_id, None) # Decrease the metric of number of kernels # running for the relevant kernel type by 1 KERNEL_CURRENTLY_RUNNING_TOTAL.labels(type=self._kernels[kernel_id].kernel_name).dec() self.pinned_superclass.shutdown_kernel(self, kernel_id, now=now, restart=restart) # Unlike its async sibling method in AsyncMappingKernelManager, removing the kernel_id # from the connections dictionary isn't as problematic before the shutdown since the # method is synchronous. However, we'll keep the relative call orders the same from # a maintenance perspective. 
self._kernel_connections.pop(kernel_id, None) self._kernel_ports.pop(kernel_id, None) async def restart_kernel(self, kernel_id, now=False): """Restart a kernel by kernel_id""" self._check_kernel_id(kernel_id) await ensure_async(self.pinned_superclass.restart_kernel(self, kernel_id, now=now)) kernel = self.get_kernel(kernel_id) # return a Future that will resolve when the kernel has successfully restarted channel = kernel.connect_shell() future = Future() def finish(): """Common cleanup when restart finishes/fails for any reason.""" if not channel.closed(): channel.close() loop.remove_timeout(timeout) kernel.remove_restart_callback(on_restart_failed, "dead") def on_reply(msg): self.log.debug("Kernel info reply received: %s", kernel_id) finish() if not future.done(): future.set_result(msg) def on_timeout(): self.log.warning("Timeout waiting for kernel_info_reply: %s", kernel_id) finish() if not future.done(): future.set_exception(TimeoutError("Timeout waiting for restart")) def on_restart_failed(): self.log.warning("Restarting kernel failed: %s", kernel_id) finish() if not future.done(): future.set_exception(RuntimeError("Restart failed")) kernel.add_restart_callback(on_restart_failed, "dead") kernel.session.send(channel, "kernel_info_request") channel.on_recv(on_reply) loop = IOLoop.current() timeout = loop.add_timeout(loop.time() + self.kernel_info_timeout, on_timeout) # Re-establish activity watching if ports have changed... if self._get_changed_ports(kernel_id) is not None: self.stop_watching_activity(kernel_id) self.start_watching_activity(kernel_id) return future def notify_connect(self, kernel_id): """Notice a new connection to a kernel""" if kernel_id in self._kernel_connections: self._kernel_connections[kernel_id] += 1 def notify_disconnect(self, kernel_id): """Notice a disconnection from a kernel""" if kernel_id in self._kernel_connections: self._kernel_connections[kernel_id] -= 1 def kernel_model(self, kernel_id): """Return a JSON-safe dict representing a kernel For use in representing kernels in the JSON APIs. """ self._check_kernel_id(kernel_id) kernel = self._kernels[kernel_id] model = { "id": kernel_id, "name": kernel.kernel_name, "last_activity": isoformat(kernel.last_activity), "execution_state": kernel.execution_state, "connections": self._kernel_connections.get(kernel_id, 0), } if getattr(kernel, "reason", None): model["reason"] = kernel.reason return model def list_kernels(self): """Returns a list of kernel_id's of kernels running.""" kernels = [] kernel_ids = self.pinned_superclass.list_kernel_ids(self) for kernel_id in kernel_ids: try: model = self.kernel_model(kernel_id) kernels.append(model) except (web.HTTPError, KeyError): pass # Probably due to a (now) non-existent kernel, continue building the list return kernels # override _check_kernel_id to raise 404 instead of KeyError def _check_kernel_id(self, kernel_id): """Check a that a kernel_id exists and raise 404 if not.""" if kernel_id not in self: raise web.HTTPError(404, "Kernel does not exist: %s" % kernel_id) # monitoring activity: def start_watching_activity(self, kernel_id): """Start watching IOPub messages on a kernel for activity. 
- update last_activity on every message - record execution_state from status messages """ kernel = self._kernels[kernel_id] # add busy/activity markers: kernel.execution_state = "starting" kernel.reason = "" kernel.last_activity = utcnow() kernel._activity_stream = kernel.connect_iopub() session = Session( config=kernel.session.config, key=kernel.session.key, ) def record_activity(msg_list): """Record an IOPub message arriving from a kernel""" self.last_kernel_activity = kernel.last_activity = utcnow() idents, fed_msg_list = session.feed_identities(msg_list) msg = session.deserialize(fed_msg_list) msg_type = msg["header"]["msg_type"] if msg_type == "status": kernel.execution_state = msg["content"]["execution_state"] self.log.debug( "activity on %s: %s (%s)", kernel_id, msg_type, kernel.execution_state ) else: self.log.debug("activity on %s: %s", kernel_id, msg_type) kernel._activity_stream.on_recv(record_activity) def stop_watching_activity(self, kernel_id): """Stop watching IOPub messages on a kernel for activity.""" kernel = self._kernels[kernel_id] if getattr(kernel, "_activity_stream", None): kernel._activity_stream.close() kernel._activity_stream = None def initialize_culler(self): """Start idle culler if 'cull_idle_timeout' is greater than zero. Regardless of that value, set flag that we've been here. """ if not self._initialized_culler and self.cull_idle_timeout > 0: if self._culler_callback is None: loop = IOLoop.current() if self.cull_interval <= 0: # handle case where user set invalid value self.log.warning( "Invalid value for 'cull_interval' detected (%s) - using default value (%s).", self.cull_interval, self.cull_interval_default, ) self.cull_interval = self.cull_interval_default self._culler_callback = PeriodicCallback( self.cull_kernels, 1000 * self.cull_interval ) self.log.info( "Culling kernels with idle durations > %s seconds at %s second intervals ...", self.cull_idle_timeout, self.cull_interval, ) if self.cull_busy: self.log.info("Culling kernels even if busy") if self.cull_connected: self.log.info("Culling kernels even with connected clients") self._culler_callback.start() self._initialized_culler = True async def cull_kernels(self): self.log.debug( "Polling every %s seconds for kernels idle > %s seconds...", self.cull_interval, self.cull_idle_timeout, ) """Create a separate list of kernels to avoid conflicting updates while iterating""" for kernel_id in list(self._kernels): try: await self.cull_kernel_if_idle(kernel_id) except Exception as e: self.log.exception( "The following exception was encountered while checking the idle duration of kernel %s: %s", kernel_id, e, ) async def cull_kernel_if_idle(self, kernel_id): kernel = self._kernels[kernel_id] if getattr(kernel, "execution_state") == "dead": self.log.warning( "Culling '%s' dead kernel '%s' (%s).", kernel.execution_state, kernel.kernel_name, kernel_id, ) await ensure_async(self.shutdown_kernel(kernel_id)) return if hasattr( kernel, "last_activity" ): # last_activity is monkey-patched, so ensure that has occurred self.log.debug( "kernel_id=%s, kernel_name=%s, last_activity=%s", kernel_id, kernel.kernel_name, kernel.last_activity, ) dt_now = utcnow() dt_idle = dt_now - kernel.last_activity # Compute idle properties is_idle_time = dt_idle > timedelta(seconds=self.cull_idle_timeout) is_idle_execute = self.cull_busy or (kernel.execution_state != "busy") connections = self._kernel_connections.get(kernel_id, 0) is_idle_connected = self.cull_connected or not connections # Cull the kernel if all three criteria are met if 
is_idle_time and is_idle_execute and is_idle_connected: idle_duration = int(dt_idle.total_seconds()) self.log.warning( "Culling '%s' kernel '%s' (%s) with %d connections due to %s seconds of inactivity.", kernel.execution_state, kernel.kernel_name, kernel_id, connections, idle_duration, ) await ensure_async(self.shutdown_kernel(kernel_id)) # AsyncMappingKernelManager inherits as much as possible from MappingKernelManager, # overriding only what is different. class AsyncMappingKernelManager(MappingKernelManager, AsyncMultiKernelManager): @default("kernel_manager_class") def _default_kernel_manager_class(self): return "jupyter_client.ioloop.AsyncIOLoopKernelManager" def __init__(self, **kwargs): self.pinned_superclass = AsyncMultiKernelManager self.pinned_superclass.__init__(self, **kwargs) self.last_kernel_activity = utcnow() async def shutdown_kernel(self, kernel_id, now=False, restart=False): """Shutdown a kernel by kernel_id""" self._check_kernel_id(kernel_id) self.stop_watching_activity(kernel_id) self.stop_buffering(kernel_id) # Decrease the metric of number of kernels # running for the relevant kernel type by 1 KERNEL_CURRENTLY_RUNNING_TOTAL.labels(type=self._kernels[kernel_id].kernel_name).dec() # Finish shutting down the kernel before clearing state to avoid a race condition. ret = await self.pinned_superclass.shutdown_kernel( self, kernel_id, now=now, restart=restart ) self._kernel_connections.pop(kernel_id, None) self._kernel_ports.pop(kernel_id, None) return ret jupyter_server-1.13.1/jupyter_server/services/kernelspecs/000077500000000000000000000000001415445537200241305ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/services/kernelspecs/__init__.py000066400000000000000000000000001415445537200262270ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/services/kernelspecs/handlers.py000066400000000000000000000064201415445537200263040ustar00rootroot00000000000000"""Tornado handlers for kernel specifications. Preliminary documentation at https://github.com/ipython/ipython/wiki/IPEP-25%3A-Registry-of-installed-kernels#rest-api """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import glob import json import os pjoin = os.path.join from tornado import web from ...base.handlers import APIHandler from ...utils import ensure_async, url_path_join, url_unescape def kernelspec_model(handler, name, spec_dict, resource_dir): """Load a KernelSpec by name and return the REST API model""" d = {"name": name, "spec": spec_dict, "resources": {}} # Add resource files if they exist resource_dir = resource_dir for resource in ["kernel.js", "kernel.css"]: if os.path.exists(pjoin(resource_dir, resource)): d["resources"][resource] = url_path_join( handler.base_url, "kernelspecs", name, resource ) for logo_file in glob.glob(pjoin(resource_dir, "logo-*")): fname = os.path.basename(logo_file) no_ext, _ = os.path.splitext(fname) d["resources"][no_ext] = url_path_join(handler.base_url, "kernelspecs", name, fname) return d def is_kernelspec_model(spec_dict): """Returns True if spec_dict is already in proper form. 
This will occur when using a gateway."""
    return (
        isinstance(spec_dict, dict)
        and "name" in spec_dict
        and "spec" in spec_dict
        and "resources" in spec_dict
    )


class MainKernelSpecHandler(APIHandler):
    @web.authenticated
    async def get(self):
        ksm = self.kernel_spec_manager
        km = self.kernel_manager
        model = {}
        model["default"] = km.default_kernel_name
        model["kernelspecs"] = specs = {}
        kspecs = await ensure_async(ksm.get_all_specs())
        for kernel_name, kernel_info in kspecs.items():
            try:
                if is_kernelspec_model(kernel_info):
                    d = kernel_info
                else:
                    d = kernelspec_model(
                        self, kernel_name, kernel_info["spec"], kernel_info["resource_dir"]
                    )
            except Exception:
                self.log.error("Failed to load kernel spec: '%s'", kernel_name, exc_info=True)
                continue
            specs[kernel_name] = d
        self.set_header("Content-Type", "application/json")
        self.finish(json.dumps(model))


class KernelSpecHandler(APIHandler):
    @web.authenticated
    async def get(self, kernel_name):
        ksm = self.kernel_spec_manager
        kernel_name = url_unescape(kernel_name)
        try:
            spec = await ensure_async(ksm.get_kernel_spec(kernel_name))
        except KeyError as e:
            raise web.HTTPError(404, u"Kernel spec %s not found" % kernel_name) from e
        if is_kernelspec_model(spec):
            model = spec
        else:
            model = kernelspec_model(self, kernel_name, spec.to_dict(), spec.resource_dir)
        self.set_header("Content-Type", "application/json")
        self.finish(json.dumps(model))


# URL to handler mappings

kernel_name_regex = r"(?P<kernel_name>[\w\.\-%]+)"

default_handlers = [
    (r"/api/kernelspecs", MainKernelSpecHandler),
    (r"/api/kernelspecs/%s" % kernel_name_regex, KernelSpecHandler),
]
jupyter_server-1.13.1/jupyter_server/services/nbconvert/000077500000000000000000000000001415445537200236125ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/services/nbconvert/__init__.py000066400000000000000000000000001415445537200257110ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/services/nbconvert/handlers.py000066400000000000000000000030441415445537200257650ustar00rootroot00000000000000import asyncio
import json

from anyio.to_thread import run_sync
from tornado import web

from ...base.handlers import APIHandler

LOCK = asyncio.Lock()


class NbconvertRootHandler(APIHandler):
    @web.authenticated
    async def get(self):
        try:
            from nbconvert.exporters import base
        except ImportError as e:
            raise web.HTTPError(500, "Could not import nbconvert: %s" % e) from e
        res = {}
        # Some exporters use the filesystem when instantiating, delegate that
        # to a thread so we don't block the event loop for it.
        exporters = await run_sync(base.get_export_names)
        for exporter_name in exporters:
            try:
                async with LOCK:
                    exporter_class = await run_sync(base.get_exporter, exporter_name)
            except ValueError:
                # I think the only way this will happen is if the entrypoint
                # is uninstalled while this method is running
                continue
            # XXX: According to the docs, it looks like this should be set to None
            # if the exporter shouldn't be exposed to the front-end and a friendly
            # name if it should. However, none of the built-in exporters have it defined.
# if not exporter_class.export_from_notebook: # continue res[exporter_name] = { "output_mimetype": exporter_class.output_mimetype, } self.finish(json.dumps(res)) default_handlers = [ (r"/api/nbconvert", NbconvertRootHandler), ] jupyter_server-1.13.1/jupyter_server/services/security/000077500000000000000000000000001415445537200234615ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/services/security/__init__.py000066400000000000000000000004071415445537200255730ustar00rootroot00000000000000# URI for the CSP Report. Included here to prevent a cyclic dependency. # csp_report_uri is needed both by the BaseHandler (for setting the report-uri) # and by the CSPReportHandler (which depends on the BaseHandler). csp_report_uri = r"/api/security/csp-report" jupyter_server-1.13.1/jupyter_server/services/security/handlers.py000066400000000000000000000015541415445537200256400ustar00rootroot00000000000000"""Tornado handlers for security logging.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. from tornado import web from . import csp_report_uri from ...base.handlers import APIHandler class CSPReportHandler(APIHandler): """Accepts a content security policy violation report""" _track_activity = False def skip_check_origin(self): """Don't check origin when reporting origin-check violations!""" return True def check_xsrf_cookie(self): # don't check XSRF for CSP reports return @web.authenticated def post(self): """Log a content security policy violation report""" self.log.warning( "Content security violation: %s", self.request.body.decode("utf8", "replace") ) default_handlers = [(csp_report_uri, CSPReportHandler)] jupyter_server-1.13.1/jupyter_server/services/sessions/000077500000000000000000000000001415445537200234605ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/services/sessions/__init__.py000066400000000000000000000000001415445537200255570ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/services/sessions/handlers.py000066400000000000000000000152431415445537200256370ustar00rootroot00000000000000"""Tornado handlers for the sessions web service. Preliminary documentation at https://github.com/ipython/ipython/wiki/IPEP-16%3A-Notebook-multi-directory-dashboard-and-URL-mapping#sessions-api """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
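# --- Illustrative sketch (not part of the original module) ------------------
# A minimal client for the Sessions REST API implemented below, shown for
# context only. The server URL, token, and the "Untitled.ipynb" path are
# assumptions supplied by the caller; nothing here is invoked by the server.
def _example_create_session(server_url, token):
    """Sketch: POST /api/sessions and return the created session model."""
    import json as _json
    from urllib.request import Request, urlopen

    payload = _json.dumps(
        {
            "path": "Untitled.ipynb",  # hypothetical notebook path
            "type": "notebook",
            "kernel": {"name": "python3"},  # request the python3 kernelspec
        }
    ).encode("utf8")
    request = Request(
        server_url.rstrip("/") + "/api/sessions",
        data=payload,
        headers={
            "Authorization": "token %s" % token,  # token auth; assumed valid
            "Content-Type": "application/json",
        },
        method="POST",
    )
    # SessionRootHandler.post (below) replies 201 with a JSON session model
    # and a Location header pointing at /api/sessions/<session_id>.
    with urlopen(request) as response:
        return _json.loads(response.read().decode("utf8"))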
import asyncio import json try: from jupyter_client.jsonutil import json_default except ImportError: from jupyter_client.jsonutil import date_default as json_default from jupyter_client.kernelspec import NoSuchKernel from tornado import web from ...base.handlers import APIHandler from jupyter_server.utils import ensure_async from jupyter_server.utils import url_path_join class SessionRootHandler(APIHandler): @web.authenticated async def get(self): # Return a list of running sessions sm = self.session_manager sessions = await ensure_async(sm.list_sessions()) self.finish(json.dumps(sessions, default=json_default)) @web.authenticated async def post(self): # Creates a new session # (unless a session already exists for the named session) sm = self.session_manager model = self.get_json_body() if model is None: raise web.HTTPError(400, "No JSON data provided") if "notebook" in model and "path" in model["notebook"]: self.log.warning("Sessions API changed, see updated swagger docs") model["path"] = model["notebook"]["path"] model["type"] = "notebook" try: path = model["path"] except KeyError as e: raise web.HTTPError(400, "Missing field in JSON data: path") from e try: mtype = model["type"] except KeyError as e: raise web.HTTPError(400, "Missing field in JSON data: type") from e name = model.get("name", None) kernel = model.get("kernel", {}) kernel_name = kernel.get("name", None) kernel_id = kernel.get("id", None) if not kernel_id and not kernel_name: self.log.debug("No kernel specified, using default kernel") kernel_name = None exists = await ensure_async(sm.session_exists(path=path)) if exists: model = await sm.get_session(path=path) else: try: model = await sm.create_session( path=path, kernel_name=kernel_name, kernel_id=kernel_id, name=name, type=mtype ) except NoSuchKernel: msg = ( "The '%s' kernel is not available. Please pick another " "suitable kernel instead, or install that kernel." % kernel_name ) status_msg = "%s not found" % kernel_name self.log.warning("Kernel not found: %s" % kernel_name) self.set_status(501) self.finish(json.dumps(dict(message=msg, short_message=status_msg))) return except Exception as e: raise web.HTTPError(500, str(e)) from e location = url_path_join(self.base_url, "api", "sessions", model["id"]) self.set_header("Location", location) self.set_status(201) self.finish(json.dumps(model, default=json_default)) class SessionHandler(APIHandler): @web.authenticated async def get(self, session_id): # Returns the JSON model for a single session sm = self.session_manager model = await sm.get_session(session_id=session_id) self.finish(json.dumps(model, default=json_default)) @web.authenticated async def patch(self, session_id): """Patch updates sessions: - path updates session to track renamed paths - kernel.name starts a new kernel with a given kernelspec """ sm = self.session_manager km = self.kernel_manager model = self.get_json_body() if model is None: raise web.HTTPError(400, "No JSON data provided") # get the previous session model before = await sm.get_session(session_id=session_id) changes = {} if "notebook" in model and "path" in model["notebook"]: self.log.warning("Sessions API changed, see updated swagger docs") model["path"] = model["notebook"]["path"] model["type"] = "notebook" if "path" in model: changes["path"] = model["path"] if "name" in model: changes["name"] = model["name"] if "type" in model: changes["type"] = model["type"] if "kernel" in model: # Kernel id takes precedence over name. 
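            # The PATCH body may carry either {"kernel": {"id": ...}} to adopt an
            # existing kernel or {"kernel": {"name": ...}} to start a fresh one for
            # this session; when both keys are present the id branch below wins and
            # no new kernel is started.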
if model["kernel"].get("id") is not None: kernel_id = model["kernel"]["id"] if kernel_id not in km: raise web.HTTPError(400, "No such kernel: %s" % kernel_id) changes["kernel_id"] = kernel_id elif model["kernel"].get("name") is not None: kernel_name = model["kernel"]["name"] kernel_id = await sm.start_kernel_for_session( session_id, kernel_name=kernel_name, name=before["name"], path=before["path"], type=before["type"], ) changes["kernel_id"] = kernel_id await sm.update_session(session_id, **changes) model = await sm.get_session(session_id=session_id) if model["kernel"]["id"] != before["kernel"]["id"]: # kernel_id changed because we got a new kernel # shutdown the old one fut = asyncio.ensure_future(ensure_async(km.shutdown_kernel(before["kernel"]["id"]))) # If we are not using pending kernels, wait for the kernel to shut down if not getattr(km, "use_pending_kernels", None): await fut self.finish(json.dumps(model, default=json_default)) @web.authenticated async def delete(self, session_id): # Deletes the session with given session_id sm = self.session_manager try: await sm.delete_session(session_id) except KeyError as e: # the kernel was deleted but the session wasn't! raise web.HTTPError(410, "Kernel deleted before session") from e self.set_status(204) self.finish() # ----------------------------------------------------------------------------- # URL to handler mappings # ----------------------------------------------------------------------------- _session_id_regex = r"(?P\w+-\w+-\w+-\w+-\w+)" default_handlers = [ (r"/api/sessions/%s" % _session_id_regex, SessionHandler), (r"/api/sessions", SessionRootHandler), ] jupyter_server-1.13.1/jupyter_server/services/sessions/sessionmanager.py000066400000000000000000000271741415445537200270630ustar00rootroot00000000000000"""A base class session manager.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import pathlib import uuid try: import sqlite3 except ImportError: # fallback on pysqlite2 if Python was build without sqlite from pysqlite2 import dbapi2 as sqlite3 from tornado import web from traitlets.config.configurable import LoggingConfigurable from traitlets import Instance from traitlets import Unicode from traitlets import validate from traitlets import TraitError from jupyter_server.utils import ensure_async from jupyter_server.traittypes import InstanceFromClasses class SessionManager(LoggingConfigurable): database_filepath = Unicode( default_value=":memory:", help=( "Th filesystem path to SQLite Database file " "(e.g. /path/to/session_database.db). By default, the session " "database is stored in-memory (i.e. `:memory:` setting from sqlite3) " "and does not persist when the current Jupyter Server shuts down." ), ).tag(config=True) @validate("database_filepath") def _validate_database_filepath(self, proposal): value = proposal["value"] if value == ":memory:": return value path = pathlib.Path(value) if path.exists(): # Verify that the database path is not a directory. if path.is_dir(): raise TraitError( "`database_filepath` expected a file path, but the given path is a directory." ) # Verify that database path is an SQLite 3 Database by checking its header. 
with open(value, "rb") as f: header = f.read(100) if not header.startswith(b"SQLite format 3") and not header == b"": raise TraitError("The given file is not an SQLite database file.") return value kernel_manager = Instance("jupyter_server.services.kernels.kernelmanager.MappingKernelManager") contents_manager = InstanceFromClasses( [ "jupyter_server.services.contents.manager.ContentsManager", "notebook.services.contents.manager.ContentsManager", ] ) # Session database initialized below _cursor = None _connection = None _columns = {"session_id", "path", "name", "type", "kernel_id"} @property def cursor(self): """Start a cursor and create a database called 'session'""" if self._cursor is None: self._cursor = self.connection.cursor() self._cursor.execute( """CREATE TABLE IF NOT EXISTS session (session_id, path, name, type, kernel_id)""" ) return self._cursor @property def connection(self): """Start a database connection""" if self._connection is None: # Set isolation level to None to autocommit all changes to the database. self._connection = sqlite3.connect(self.database_filepath, isolation_level=None) self._connection.row_factory = sqlite3.Row return self._connection def close(self): """Close the sqlite connection""" if self._cursor is not None: self._cursor.close() self._cursor = None def __del__(self): """Close connection once SessionManager closes""" self.close() async def session_exists(self, path): """Check to see if the session of a given name exists""" exists = False self.cursor.execute("SELECT * FROM session WHERE path=?", (path,)) row = self.cursor.fetchone() if row is not None: # Note, although we found a row for the session, the associated kernel may have # been culled or died unexpectedly. If that's the case, we should delete the # row, thereby terminating the session. This can be done via a call to # row_to_model that tolerates that condition. If row_to_model returns None, # we'll return false, since, at that point, the session doesn't exist anyway. model = await self.row_to_model(row, tolerate_culled=True) if model is not None: exists = True return exists def new_session_id(self): "Create a uuid for a new session" return str(uuid.uuid4()) async def create_session( self, path=None, name=None, type=None, kernel_name=None, kernel_id=None ): """Creates a session and returns its model""" session_id = self.new_session_id() if kernel_id is not None and kernel_id in self.kernel_manager: pass else: kernel_id = await self.start_kernel_for_session( session_id, path, name, type, kernel_name ) result = await self.save_session( session_id, path=path, name=name, type=type, kernel_id=kernel_id ) return result async def start_kernel_for_session(self, session_id, path, name, type, kernel_name): """Start a new kernel for a given session.""" # allow contents manager to specify kernels cwd kernel_path = self.contents_manager.get_kernel_path(path=path) kernel_id = await self.kernel_manager.start_kernel( path=kernel_path, kernel_name=kernel_name ) return kernel_id async def save_session(self, session_id, path=None, name=None, type=None, kernel_id=None): """Saves the items for the session with the given session_id Given a session_id (and any other of the arguments), this method creates a row in the sqlite session database that holds the information for a session. 
Parameters ---------- session_id : str uuid for the session; this method must be given a session_id path : str the path for the given session name: str the name of the session type: string the type of the session kernel_id : str a uuid for the kernel associated with this session Returns ------- model : dict a dictionary of the session model """ self.cursor.execute( "INSERT INTO session VALUES (?,?,?,?,?)", (session_id, path, name, type, kernel_id) ) result = await self.get_session(session_id=session_id) return result async def get_session(self, **kwargs): """Returns the model for a particular session. Takes a keyword argument and searches for the value in the session database, then returns the rest of the session's info. Parameters ---------- **kwargs : keyword argument must be given one of the keywords and values from the session database (i.e. session_id, path, name, type, kernel_id) Returns ------- model : dict returns a dictionary that includes all the information from the session described by the kwarg. """ if not kwargs: raise TypeError("must specify a column to query") conditions = [] for column in kwargs.keys(): if column not in self._columns: raise TypeError("No such column: %r", column) conditions.append("%s=?" % column) query = "SELECT * FROM session WHERE %s" % (" AND ".join(conditions)) self.cursor.execute(query, list(kwargs.values())) try: row = self.cursor.fetchone() except KeyError: # The kernel is missing, so the session just got deleted. row = None if row is None: q = [] for key, value in kwargs.items(): q.append("%s=%r" % (key, value)) raise web.HTTPError(404, u"Session not found: %s" % (", ".join(q))) model = await self.row_to_model(row) return model async def update_session(self, session_id, **kwargs): """Updates the values in the session database. Changes the values of the session with the given session_id with the values from the keyword arguments. Parameters ---------- session_id : str a uuid that identifies a session in the sqlite3 database **kwargs : str the key must correspond to a column title in session database, and the value replaces the current value in the session with session_id. """ await self.get_session(session_id=session_id) if not kwargs: # no changes return sets = [] for column in kwargs.keys(): if column not in self._columns: raise TypeError("No such column: %r" % column) sets.append("%s=?" % column) query = "UPDATE session SET %s WHERE session_id=?" % (", ".join(sets)) self.cursor.execute(query, list(kwargs.values()) + [session_id]) def kernel_culled(self, kernel_id): """Checks if the kernel is still considered alive and returns true if its not found. """ return kernel_id not in self.kernel_manager async def row_to_model(self, row, tolerate_culled=False): """Takes sqlite database session row and turns it into a dictionary""" kernel_culled = await ensure_async(self.kernel_culled(row["kernel_id"])) if kernel_culled: # The kernel was culled or died without deleting the session. # We can't use delete_session here because that tries to find # and shut down the kernel - so we'll delete the row directly. # # If caller wishes to tolerate culled kernels, log a warning # and return None. Otherwise, raise KeyError with a similar # message. self.cursor.execute("DELETE FROM session WHERE session_id=?", (row["session_id"],)) msg = ( "Kernel '{kernel_id}' appears to have been culled or died unexpectedly, " "invalidating session '{session_id}'. 
The session has been removed.".format( kernel_id=row["kernel_id"], session_id=row["session_id"] ) ) if tolerate_culled: self.log.warning(msg + " Continuing...") return raise KeyError(msg) kernel_model = await ensure_async(self.kernel_manager.kernel_model(row["kernel_id"])) model = { "id": row["session_id"], "path": row["path"], "name": row["name"], "type": row["type"], "kernel": kernel_model, } if row["type"] == "notebook": # Provide the deprecated API. model["notebook"] = {"path": row["path"], "name": row["name"]} return model async def list_sessions(self): """Returns a list of dictionaries containing all the information from the session database""" c = self.cursor.execute("SELECT * FROM session") result = [] # We need to use fetchall() here, because row_to_model can delete rows, # which messes up the cursor if we're iterating over rows. for row in c.fetchall(): try: model = await self.row_to_model(row) result.append(model) except KeyError: pass return result async def delete_session(self, session_id): """Deletes the row in the session database with given session_id""" session = await self.get_session(session_id=session_id) await ensure_async(self.kernel_manager.shutdown_kernel(session["kernel"]["id"])) self.cursor.execute("DELETE FROM session WHERE session_id=?", (session_id,)) jupyter_server-1.13.1/jupyter_server/services/shutdown.py000066400000000000000000000007171415445537200240440ustar00rootroot00000000000000"""HTTP handler to shut down the Jupyter server. """ from tornado import ioloop from tornado import web from jupyter_server.base.handlers import JupyterHandler class ShutdownHandler(JupyterHandler): @web.authenticated async def post(self): self.log.info("Shutting down on /api/shutdown request.") await self.serverapp._cleanup() ioloop.IOLoop.current().stop() default_handlers = [ (r"/api/shutdown", ShutdownHandler), ] jupyter_server-1.13.1/jupyter_server/static/000077500000000000000000000000001415445537200212565ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/static/favicon.ico000066400000000000000000000764461415445537200234200ustar00rootroot00000000000000 hF 00 %V@@ (B:(  @&w&wW&ww&ww&wW&w&w%&w&w&w&w&w&w&w&w&w%&wO&w&w&w&w&w&w&w&w&w&w&wO&wA&w&w&wW'w&x&wW&w&w&wA'y &w&w)&w)&w'y 'x&z'x'x'x'x&z'x'y &w&w)&w)&w'y &wA&w&w&wW&w&x&wW&w&w&wA&wO&w&w&w&w&w&w&w&w&w&w&wO&w%&w&w&w&w&w&w&w&w&w%&w&wW&ww&ww&wW'w( @ (w &xW&w&w&w&w&w&w&w&w&xW(w 'y#&w&w&w&w&w&w&w&w&w&w&w&w&w&w'y#+y &w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w+y 'x!&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w'x!'y1&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w'y1(x%&w&w&w&w&w&w'wM'z'z&wM&w&w&w&w&w&w(x%'| &w&w&w&w'w)(w)&w&w&w&w'| &w&w&w(x&z&w&w&w'y%&w'x5(x5&w'y%'xg&z'x'xe+x +x +x +x 'xe'x&z'xg'y%&w(x5'x5&w'y%&w&w&w'x(x&w&w&w'| &w&w&w&w(x''w)&w&w&w&w'| (x%&w&w&w&w&w&w&wM'z&{'wM&w&w&w&w&w&w(x%'y1&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w'y1'x!&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w'x!+y &w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w&w+y 'y#&w&w&w&w&w&w&w&w&w&w&w&w&w&w'y#(w &xW&w&w&w&w&w&w&w&w'wU(w (0` %+z 'x/'xO&wa&wm%wy%wy&wm&wa'xO'x/+z (w &wE&w&w&w&w&w&w&w&w&w&w&w&w&w&x&wE(w )z 'y3&xy&w&w&w&w&w&w&w&w&w&w%w&w&w%w&w&w%w&w&xy'y3)z )y!&w&w&w&v&w%w&v&w%w&v&w%w%v&w%w%v&w%w%v&w%w&v&w&w)y!+y &xY&w%v%v&w%w%v&w%w%v&w%w%v&w&v&w&w&v&w&w&v&w&w&v&w&v&w&xY+y 
[binary icon data omitted]
[binary icon data omitted: jupyter_server/static/favicons/favicon-busy-1.ico, favicon-busy-2.ico, favicon-busy-3.ico, favicon-file.ico, favicon-notebook.ico, favicon-terminal.ico, favicon.ico]
[binary image data omitted: jupyter_server/static/logo/logo.png]
[recovered stylesheet fragment; its tar header was lost inside the binary data above]
#header {
  display: block;
  background-color: #fff;
  position: relative;
  z-index: 100;
}
body > #header #header-container {
  display: flex;
  flex-direction: row;
  justify-content: space-between;
  padding: 5px;
  box-sizing: border-box;
  -moz-box-sizing: border-box;
  -webkit-box-sizing: border-box;
}
body > #header .header-bar {
  width: 100%;
  height: 1px;
  background: #e7e7e7;
  margin-bottom: -1px;
}
.navbar-brand {
  float: left;
  height: 30px;
  padding: 6px 0px;
  font-size: 17px;
  line-height: 18px;
}
.navbar-brand,
.navbar-nav > li > a {
  text-shadow: 0 1px 0 rgba(255, 255, 255, 0.25);
}
.nav {
  padding-left: 0;
  margin-bottom: 0;
  list-style: none;
}
.center-nav {
  display: inline-block;
  margin-bottom: -4px;
}
div.error {
  margin: 2em;
  text-align: center;
}
div.error > h1 {
  font-size: 500%;
  line-height: normal;
}
div.error > p {
  font-size: 200%;
  line-height: normal;
}
jupyter_server-1.13.1/jupyter_server/templates/000077500000000000000000000000001415445537200217655ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/templates/404.html000066400000000000000000000002221415445537200231600ustar00rootroot00000000000000{% extends "error.html" %} {% block error_detail %}

{% trans %}You are requesting a page that does not exist!{% endtrans %}

{% endblock %} jupyter_server-1.13.1/jupyter_server/templates/browser-open.html000066400000000000000000000007731415445537200253040ustar00rootroot00000000000000{# This template is not served, but written as a file to open in the browser, passing the token without putting it in a command-line argument. #} Opening Jupyter Application

This page should redirect you to a Jupyter application. If it doesn't, click here to go to Jupyter.

jupyter_server-1.13.1/jupyter_server/templates/error.html000066400000000000000000000012151415445537200240030ustar00rootroot00000000000000{% extends "page.html" %} {% block stylesheet %} {{super()}} {% endblock %} {% block site %}
{% block h1_error %}

{{status_code}} : {{status_message}}

{% endblock h1_error %} {% block error_detail %} {% if message %}

{% trans %}The error was:{% endtrans %}

{{message}}
{% endif %} {% endblock error_detail %}
{% endblock %} {% block script %} {% endblock script %} jupyter_server-1.13.1/jupyter_server/templates/login.html000066400000000000000000000105641415445537200237710ustar00rootroot00000000000000{% extends "page.html" %} {% block stylesheet %} {% endblock %} {% block site %}
{% if login_available %} {# login_available means password-login is allowed. Show the form. #}
{% else %}

{% trans %}No login available, you shouldn't be seeing this page.{% endtrans %}

{% endif %} {% if message %}
{% for key in message %}
{{message[key]}}
{% endfor %}
{% endif %} {% if token_available %} {% block token_message %}

Token authentication is enabled

If no password has been configured, you need to open the server with its login token in the URL, or paste it above. This requirement will be lifted if you enable a password.

The command:

jupyter server list
will show you the URLs of running servers with their tokens, which you can copy and paste into your browser. For example:

Currently running servers:
http://localhost:8888/?token=c8de56fa... :: /Users/you/notebooks

or you can paste just the token value into the password field on this page.

See the documentation on how to enable a password in place of token authentication, if you would like to avoid dealing with random tokens.

Cookies are required for authenticated access to the Jupyter server.

{% if allow_password_change %}

{% trans %}Setup a Password{% endtrans %}

You can also setup a password by entering your token and a new password on the fields below:

{{ xsrf_form_html() | safe }}
{% endif %}
{% endblock token_message %} {% endif %}
{% endblock %} {% block script %} {% endblock %} jupyter_server-1.13.1/jupyter_server/templates/logout.html000066400000000000000000000014671415445537200241740ustar00rootroot00000000000000{% extends "page.html" %} {# This template is rendered in response to an authenticated request, so the user is technically logged in. But when the user sees it, the cookie is cleared by the Javascript, so we should render this as if the user was logged out, without e.g. authentication tokens. #} {% set logged_in = False %} {% block stylesheet %} {% endblock %} {% block site %}
{% if message %} {% for key in message %}
{{message[key]}}
{% endfor %} {% endif %} {% if not login_available %} {% trans %}Proceed to the dashboard{% endtrans %}. {% else %} {% trans %}Proceed to the login page{% endtrans %}. {% endif %}
{% endblock %} jupyter_server-1.13.1/jupyter_server/templates/main.html000066400000000000000000000002231415445537200235740ustar00rootroot00000000000000{% extends "page.html" %} {% block site %}

A Jupyter Server is running.

{% endblock site %} jupyter_server-1.13.1/jupyter_server/templates/page.html000066400000000000000000000055221415445537200235730ustar00rootroot00000000000000 {% block title %}Jupyter Server{% endblock %} {% block favicon %} {% endblock %} {% block stylesheet %} {% endblock stylesheet %} {% block meta %} {% endblock meta %}
{% block site %} {% endblock site %}
{% block after_site %} {% endblock after_site %} {% block script %} {% endblock script %} jupyter_server-1.13.1/jupyter_server/templates/view.html000066400000000000000000000010561415445537200236270ustar00rootroot00000000000000 {{page_title}}
jupyter_server-1.13.1/jupyter_server/terminal/000077500000000000000000000000001415445537200216025ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/terminal/__init__.py000066400000000000000000000036561415445537200237250ustar00rootroot00000000000000import os import sys import terminado from ..utils import check_version if not check_version(terminado.__version__, "0.8.3"): raise ImportError("terminado >= 0.8.3 required, found %s" % terminado.__version__) from ipython_genutils.py3compat import which from jupyter_server.utils import url_path_join as ujoin from . import api_handlers from .handlers import TermSocket from .terminalmanager import TerminalManager def initialize(webapp, root_dir, connection_url, settings): if os.name == "nt": default_shell = "powershell.exe" else: default_shell = which("sh") shell_override = settings.get("shell_command") shell = [os.environ.get("SHELL") or default_shell] if shell_override is None else shell_override # When the notebook server is not running in a terminal (e.g. when # it's launched by a JupyterHub spawner), it's likely that the user # environment hasn't been fully set up. In that case, run a login # shell to automatically source /etc/profile and the like, unless # the user has specifically set a preferred shell command. if os.name != "nt" and shell_override is None and not sys.stdout.isatty(): shell.append("-l") terminal_manager = webapp.settings["terminal_manager"] = TerminalManager( shell_command=shell, extra_env={ "JUPYTER_SERVER_ROOT": root_dir, "JUPYTER_SERVER_URL": connection_url, }, parent=webapp.settings["serverapp"], ) terminal_manager.log = webapp.settings["serverapp"].log base_url = webapp.settings["base_url"] handlers = [ ( ujoin(base_url, r"/terminals/websocket/(\w+)"), TermSocket, {"term_manager": terminal_manager}, ), (ujoin(base_url, r"/api/terminals"), api_handlers.TerminalRootHandler), (ujoin(base_url, r"/api/terminals/(\w+)"), api_handlers.TerminalHandler), ] webapp.add_handlers(".*$", handlers) jupyter_server-1.13.1/jupyter_server/terminal/api_handlers.py000066400000000000000000000015661415445537200246150ustar00rootroot00000000000000import json from tornado import web from ..base.handlers import APIHandler class TerminalRootHandler(APIHandler): @web.authenticated def get(self): models = self.terminal_manager.list() self.finish(json.dumps(models)) @web.authenticated def post(self): """POST /terminals creates a new terminal and redirects to it""" data = self.get_json_body() or {} model = self.terminal_manager.create(**data) self.finish(json.dumps(model)) class TerminalHandler(APIHandler): SUPPORTED_METHODS = ("GET", "DELETE") @web.authenticated def get(self, name): model = self.terminal_manager.get(name) self.finish(json.dumps(model)) @web.authenticated async def delete(self, name): await self.terminal_manager.terminate(name, force=True) self.set_status(204) self.finish() jupyter_server-1.13.1/jupyter_server/terminal/handlers.py000066400000000000000000000026361415445537200237630ustar00rootroot00000000000000# encoding: utf-8 """Tornado handlers for the terminal emulator.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
import terminado from tornado import web from ..base.handlers import JupyterHandler from ..base.zmqhandlers import WebSocketMixin from jupyter_server._tz import utcnow class TermSocket(WebSocketMixin, JupyterHandler, terminado.TermSocket): def origin_check(self): """Terminado adds redundant origin_check Tornado already calls check_origin, so don't do anything here. """ return True def get(self, *args, **kwargs): if not self.get_current_user(): raise web.HTTPError(403) if not args[0] in self.term_manager.terminals: raise web.HTTPError(404) return super(TermSocket, self).get(*args, **kwargs) def on_message(self, message): super(TermSocket, self).on_message(message) self._update_activity() def write_message(self, message, binary=False): super(TermSocket, self).write_message(message, binary=binary) self._update_activity() def _update_activity(self): self.application.settings["terminal_last_activity"] = utcnow() # terminal may not be around on deletion/cull if self.term_name in self.terminal_manager.terminals: self.terminal_manager.terminals[self.term_name].last_activity = utcnow() jupyter_server-1.13.1/jupyter_server/terminal/terminalmanager.py000066400000000000000000000145551415445537200253340ustar00rootroot00000000000000"""A MultiTerminalManager for use in the notebook webserver - raises HTTPErrors - creates REST API models """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. from datetime import timedelta import terminado from tornado import web from tornado.ioloop import IOLoop from tornado.ioloop import PeriodicCallback from traitlets import Integer from traitlets.config import LoggingConfigurable from ..prometheus.metrics import TERMINAL_CURRENTLY_RUNNING_TOTAL from jupyter_server._tz import isoformat from jupyter_server._tz import utcnow class TerminalManager(LoggingConfigurable, terminado.NamedTermManager): """ """ _culler_callback = None _initialized_culler = False cull_inactive_timeout = Integer( 0, config=True, help="""Timeout (in seconds) in which a terminal has been inactive and ready to be culled. Values of 0 or lower disable culling.""", ) cull_interval_default = 300 # 5 minutes cull_interval = Integer( cull_interval_default, config=True, help="""The interval (in seconds) on which to check for terminals exceeding the inactive timeout value.""", ) # ------------------------------------------------------------------------- # Methods for managing terminals # ------------------------------------------------------------------------- def __init__(self, *args, **kwargs): super(TerminalManager, self).__init__(*args, **kwargs) def create(self, **kwargs): """Create a new terminal.""" name, term = self.new_named_terminal(**kwargs) # Monkey-patch last-activity, similar to kernels. Should we need # more functionality per terminal, we can look into possible sub- # classing or containment then. 
term.last_activity = utcnow() model = self.get_terminal_model(name) # Increase the metric by one because a new terminal was created TERMINAL_CURRENTLY_RUNNING_TOTAL.inc() # Ensure culler is initialized self._initialize_culler() return model def get(self, name): """Get terminal 'name'.""" model = self.get_terminal_model(name) return model def list(self): """Get a list of all running terminals.""" models = [self.get_terminal_model(name) for name in self.terminals] # Update the metric below to the length of the list 'terms' TERMINAL_CURRENTLY_RUNNING_TOTAL.set(len(models)) return models async def terminate(self, name, force=False): """Terminate terminal 'name'.""" self._check_terminal(name) await super(TerminalManager, self).terminate(name, force=force) # Decrease the metric below by one # because a terminal has been shutdown TERMINAL_CURRENTLY_RUNNING_TOTAL.dec() async def terminate_all(self): """Terminate all terminals.""" terms = [name for name in self.terminals] for term in terms: await self.terminate(term, force=True) def get_terminal_model(self, name): """Return a JSON-safe dict representing a terminal. For use in representing terminals in the JSON APIs. """ self._check_terminal(name) term = self.terminals[name] model = { "name": name, "last_activity": isoformat(term.last_activity), } return model def _check_terminal(self, name): """Check a that terminal 'name' exists and raise 404 if not.""" if name not in self.terminals: raise web.HTTPError(404, u"Terminal not found: %s" % name) def _initialize_culler(self): """Start culler if 'cull_inactive_timeout' is greater than zero. Regardless of that value, set flag that we've been here. """ if not self._initialized_culler and self.cull_inactive_timeout > 0: if self._culler_callback is None: loop = IOLoop.current() if self.cull_interval <= 0: # handle case where user set invalid value self.log.warning( "Invalid value for 'cull_interval' detected (%s) - using default value (%s).", self.cull_interval, self.cull_interval_default, ) self.cull_interval = self.cull_interval_default self._culler_callback = PeriodicCallback( self._cull_terminals, 1000 * self.cull_interval ) self.log.info( "Culling terminals with inactivity > %s seconds at %s second intervals ...", self.cull_inactive_timeout, self.cull_interval, ) self._culler_callback.start() self._initialized_culler = True async def _cull_terminals(self): self.log.debug( "Polling every %s seconds for terminals inactive for > %s seconds...", self.cull_interval, self.cull_inactive_timeout, ) # Create a separate list of terminals to avoid conflicting updates while iterating for name in list(self.terminals): try: await self._cull_inactive_terminal(name) except Exception as e: self.log.exception( "The following exception was encountered while checking the " "activity of terminal {}: {}".format(name, e) ) async def _cull_inactive_terminal(self, name): try: term = self.terminals[name] except KeyError: return # KeyErrors are somewhat expected since the terminal can be terminated as the culling check is made. 
self.log.debug("name=%s, last_activity=%s", name, term.last_activity) if hasattr(term, "last_activity"): dt_now = utcnow() dt_inactive = dt_now - term.last_activity # Compute idle properties is_time = dt_inactive > timedelta(seconds=self.cull_inactive_timeout) # Cull the kernel if all three criteria are met if is_time: inactivity = int(dt_inactive.total_seconds()) self.log.warning( "Culling terminal '%s' due to %s seconds of inactivity.", name, inactivity ) await self.terminate(name, force=True) jupyter_server-1.13.1/jupyter_server/tests/000077500000000000000000000000001415445537200211315ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/tests/__init__.py000066400000000000000000000000001415445537200232300ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/tests/auth/000077500000000000000000000000001415445537200220725ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/tests/auth/__init__.py000066400000000000000000000000001415445537200241710ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/tests/auth/test_login.py000066400000000000000000000047651415445537200246270ustar00rootroot00000000000000"""Tests for login redirects""" from functools import partial from urllib.parse import urlencode import pytest from tornado.httpclient import HTTPClientError from tornado.httputil import parse_cookie from tornado.httputil import url_concat from jupyter_server.utils import url_path_join # override default config to ensure a non-empty base url is used @pytest.fixture def jp_base_url(): return "/a%40b/" @pytest.fixture def jp_server_config(jp_base_url): return { "ServerApp": { "base_url": jp_base_url, }, } async def _login(jp_serverapp, http_server_client, jp_base_url, next): # first: request login page with no creds login_url = url_path_join(jp_base_url, "login") first = await http_server_client.fetch(login_url) cookie_header = first.headers["Set-Cookie"] cookies = parse_cookie(cookie_header) # second, submit login form with credentials try: resp = await http_server_client.fetch( url_concat(login_url, {"next": next}), method="POST", body=urlencode( { "password": jp_serverapp.token, "_xsrf": cookies.get("_xsrf", ""), } ), headers={"Cookie": cookie_header}, follow_redirects=False, ) except HTTPClientError as e: if e.code != 302: raise return e.response.headers["Location"] else: assert resp.code == 302, "Should have returned a redirect!" 
@pytest.fixture def login(jp_serverapp, http_server_client, jp_base_url): """Fixture to return a function to login to a Jupyter server by submitting the login page form """ yield partial(_login, jp_serverapp, http_server_client, jp_base_url) @pytest.mark.parametrize( "bad_next", ( r"\\tree", "//some-host", "//host{base_url}tree", "https://google.com", "/absolute/not/base_url", ), ) async def test_next_bad(login, jp_base_url, bad_next): bad_next = bad_next.format(base_url=jp_base_url) url = await login(bad_next) assert url == jp_base_url @pytest.mark.parametrize( "next_path", ( "tree/", "//{base_url}tree", "notebooks/notebook.ipynb", "tree//something", ), ) async def test_next_ok(login, jp_base_url, next_path): next_path = next_path.format(base_url=jp_base_url) expected = jp_base_url + next_path actual = await login(next=expected) assert actual == expected jupyter_server-1.13.1/jupyter_server/tests/auth/test_security.py000066400000000000000000000016071415445537200253560ustar00rootroot00000000000000from jupyter_server.auth.security import passwd from jupyter_server.auth.security import passwd_check def test_passwd_structure(): p = passwd("passphrase") algorithm, hashed = p.split(":") assert algorithm == "argon2", algorithm assert hashed.startswith("$argon2id$"), hashed def test_roundtrip(): p = passwd("passphrase") assert passwd_check(p, "passphrase") def test_bad(): p = passwd("passphrase") assert not passwd_check(p, p) assert not passwd_check(p, "a:b:c:d") assert not passwd_check(p, "a:b") def test_passwd_check_unicode(): # GH issue #4524 phash = u"sha1:23862bc21dd3:7a415a95ae4580582e314072143d9c382c491e4f" assert passwd_check(phash, u"łe¶ŧ←↓→") phash = ( u"argon2:$argon2id$v=19$m=10240,t=10,p=8$" u"qjjDiZUofUVVnrVYxacnbA$l5pQq1bJ8zglGT2uXP6iOg" ) assert passwd_check(phash, u"łe¶ŧ←↓→") jupyter_server-1.13.1/jupyter_server/tests/extension/000077500000000000000000000000001415445537200231455ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/tests/extension/__init__.py000066400000000000000000000000001415445537200252440ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/tests/extension/conftest.py000066400000000000000000000021111415445537200253370ustar00rootroot00000000000000import pytest from .mockextensions.app import MockExtensionApp mock_html = """ {% block title %}Jupyter Server 1{% endblock %} {% block meta %} {% endblock %}
{% block site %} {% endblock site %}
{% block after_site %} {% endblock after_site %} """ @pytest.fixture def mock_template(jp_template_dir): index = jp_template_dir.joinpath("index.html") index.write_text(mock_html) @pytest.fixture def extension_manager(jp_serverapp): return jp_serverapp.extension_manager @pytest.fixture def config_file(jp_config_dir): """""" f = jp_config_dir.joinpath("jupyter_mockextension_config.py") f.write_text("c.MockExtensionApp.mock_trait ='config from file'") return f @pytest.fixture(autouse=True) def jp_mockextension_cleanup(): yield MockExtensionApp.clear_instance() jupyter_server-1.13.1/jupyter_server/tests/extension/mockextensions/000077500000000000000000000000001415445537200262165ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/tests/extension/mockextensions/__init__.py000066400000000000000000000011021415445537200303210ustar00rootroot00000000000000"""A mock extension module with a list of extensions to load in various tests. """ from .app import MockExtensionApp # Function that makes these extensions discoverable # by the test functions. def _jupyter_server_extension_points(): return [ {"module": "jupyter_server.tests.extension.mockextensions.app", "app": MockExtensionApp}, {"module": "jupyter_server.tests.extension.mockextensions.mock1"}, {"module": "jupyter_server.tests.extension.mockextensions.mock2"}, {"module": "jupyter_server.tests.extension.mockextensions.mock3"}, ] jupyter_server-1.13.1/jupyter_server/tests/extension/mockextensions/app.py000066400000000000000000000032561415445537200273560ustar00rootroot00000000000000import os from traitlets import List from traitlets import Unicode from jupyter_server.base.handlers import JupyterHandler from jupyter_server.extension.application import ExtensionApp from jupyter_server.extension.application import ExtensionAppJinjaMixin from jupyter_server.extension.handler import ExtensionHandlerJinjaMixin from jupyter_server.extension.handler import ExtensionHandlerMixin STATIC_PATH = os.path.join(os.path.dirname(__file__), "static") # Function that makes these extensions discoverable # by the test functions. def _jupyter_server_extension_points(): return [{"module": __name__, "app": MockExtensionApp}] class MockExtensionHandler(ExtensionHandlerMixin, JupyterHandler): def get(self): self.finish(self.config.mock_trait) class MockExtensionTemplateHandler( ExtensionHandlerJinjaMixin, ExtensionHandlerMixin, JupyterHandler ): def get(self): self.write(self.render_template("index.html")) class MockExtensionApp(ExtensionAppJinjaMixin, ExtensionApp): name = "mockextension" template_paths = List().tag(config=True) static_paths = [STATIC_PATH] mock_trait = Unicode("mock trait", config=True) loaded = False serverapp_config = { "jpserver_extensions": {"jupyter_server.tests.extension.mockextensions.mock1": True} } @staticmethod def get_extension_package(): return "jupyter_server.tests.extension.mockextensions" def initialize_handlers(self): self.handlers.append(("/mock", MockExtensionHandler)) self.handlers.append(("/mock_template", MockExtensionTemplateHandler)) self.loaded = True if __name__ == "__main__": MockExtensionApp.launch_instance() jupyter_server-1.13.1/jupyter_server/tests/extension/mockextensions/mock1.py000066400000000000000000000004671415445537200276110ustar00rootroot00000000000000"""A mock extension named `mock1` for testing purposes. """ # by the test functions. 
def _jupyter_server_extension_paths():
    return [{"module": "jupyter_server.tests.extension.mockextensions.mock1"}]


def _load_jupyter_server_extension(serverapp):
    serverapp.mockI = True
    serverapp.mock_shared = "I"
jupyter_server-1.13.1/jupyter_server/tests/extension/mockextensions/mock2.py000066400000000000000000000004711415445537200276050ustar00rootroot00000000000000"""A mock extension named `mock2` for testing purposes.
"""
# Function that makes these extensions discoverable
# by the test functions.
def _jupyter_server_extension_paths():
    return [{"module": "jupyter_server.tests.extension.mockextensions.mock2"}]


def _load_jupyter_server_extension(serverapp):
    serverapp.mockII = True
    serverapp.mock_shared = "II"
jupyter_server-1.13.1/jupyter_server/tests/extension/mockextensions/mock3.py000066400000000000000000000001661415445537200276070ustar00rootroot00000000000000"""A mock extension named `mock3` for testing purposes.
"""


def _load_jupyter_server_extension(serverapp):
    pass
jupyter_server-1.13.1/jupyter_server/tests/extension/mockextensions/mockext_both.py000066400000000000000000000005071415445537200312600ustar00rootroot00000000000000"""A mock extension named `mockext_both` for testing purposes.
"""
# Function that makes these extensions discoverable
# by the test functions.
def _jupyter_server_extension_paths():
    return [{"module": "jupyter_server.tests.extension.mockextensions.mockext_both"}]


def _load_jupyter_server_extension(serverapp):
    pass
jupyter_server-1.13.1/jupyter_server/tests/extension/mockextensions/mockext_py.py000066400000000000000000000005031415445537200307500ustar00rootroot00000000000000"""A mock extension named `mockext_py` for testing purposes.
"""
# Function that makes these extensions discoverable
# by the test functions.
def _jupyter_server_extension_paths():
    return [{"module": "jupyter_server.tests.extension.mockextensions.mockext_py"}]


def _load_jupyter_server_extension(serverapp):
    pass
jupyter_server-1.13.1/jupyter_server/tests/extension/mockextensions/mockext_sys.py000066400000000000000000000005041415445537200311370ustar00rootroot00000000000000"""A mock extension named `mockext_sys` for testing purposes.
"""
# Function that makes these extensions discoverable
# by the test functions.
def _jupyter_server_extension_paths(): return [{"module": "jupyter_server.tests.extension.mockextensions.mockext_user"}] def _load_jupyter_server_extension(serverapp): pass jupyter_server-1.13.1/jupyter_server/tests/extension/mockextensions/static/000077500000000000000000000000001415445537200275055ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/tests/extension/mockextensions/static/mock.txt000066400000000000000000000000241415445537200311730ustar00rootroot00000000000000mock static content jupyter_server-1.13.1/jupyter_server/tests/extension/test_app.py000066400000000000000000000107061415445537200253420ustar00rootroot00000000000000import pytest from traitlets.config import Config from .mockextensions.app import MockExtensionApp from jupyter_server.serverapp import ServerApp from jupyter_server.utils import run_sync @pytest.fixture def jp_server_config(jp_template_dir): config = { "ServerApp": { "jpserver_extensions": {"jupyter_server.tests.extension.mockextensions": True}, }, "MockExtensionApp": {"template_paths": [str(jp_template_dir)], "log_level": "DEBUG"}, } return config @pytest.fixture def mock_extension(extension_manager): name = "jupyter_server.tests.extension.mockextensions" pkg = extension_manager.extensions[name] point = pkg.extension_points["mockextension"] app = point.app return app def test_initialize(jp_serverapp, jp_template_dir, mock_extension): # Check that settings and handlers were added to the mock extension. assert isinstance(mock_extension.serverapp, ServerApp) assert len(mock_extension.handlers) > 0 assert mock_extension.loaded assert mock_extension.template_paths == [str(jp_template_dir)] @pytest.mark.parametrize( "trait_name, trait_value, jp_argv", (["mock_trait", "test mock trait", ["--MockExtensionApp.mock_trait=test mock trait"]],), ) def test_instance_creation_with_argv( trait_name, trait_value, jp_argv, mock_extension, ): assert getattr(mock_extension, trait_name) == trait_value def test_extensionapp_load_config_file( config_file, jp_serverapp, mock_extension, ): # Assert default config_file_paths is the same in the app and extension. 
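# (ExtensionApp derives its config file name as "jupyter_<name>_config",
# which is why the config_file fixture above writes
# jupyter_mockextension_config.py and the asserts below expect that name
# on the shared config_file_paths.)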
assert mock_extension.config_file_paths == jp_serverapp.config_file_paths assert mock_extension.config_dir == jp_serverapp.config_dir assert mock_extension.config_file_name == "jupyter_mockextension_config" # Assert that the trait is updated by config file assert mock_extension.mock_trait == "config from file" OPEN_BROWSER_COMBINATIONS = ( (True, {}), (True, {"ServerApp": {"open_browser": True}}), (False, {"ServerApp": {"open_browser": False}}), (True, {"MockExtensionApp": {"open_browser": True}}), (False, {"MockExtensionApp": {"open_browser": False}}), (True, {"ServerApp": {"open_browser": True}, "MockExtensionApp": {"open_browser": True}}), (False, {"ServerApp": {"open_browser": True}, "MockExtensionApp": {"open_browser": False}}), (True, {"ServerApp": {"open_browser": False}, "MockExtensionApp": {"open_browser": True}}), (False, {"ServerApp": {"open_browser": False}, "MockExtensionApp": {"open_browser": False}}), ) @pytest.mark.parametrize("expected_value, config", OPEN_BROWSER_COMBINATIONS) def test_browser_open(monkeypatch, jp_environ, config, expected_value): serverapp = MockExtensionApp.initialize_server(config=Config(config)) assert serverapp.open_browser == expected_value def test_load_parallel_extensions(monkeypatch, jp_environ): serverapp = MockExtensionApp.initialize_server() exts = serverapp.extension_manager.extensions assert "jupyter_server.tests.extension.mockextensions.mock1" in exts assert "jupyter_server.tests.extension.mockextensions" in exts exts = serverapp.jpserver_extensions assert exts["jupyter_server.tests.extension.mockextensions.mock1"] assert exts["jupyter_server.tests.extension.mockextensions"] def test_stop_extension(jp_serverapp, caplog): """Test the stop_extension method. This should be fired by ServerApp.cleanup_extensions. """ calls = 0 # load extensions (make sure we only have the one extension loaded jp_serverapp.extension_manager.load_all_extensions() extension_name = "jupyter_server.tests.extension.mockextensions" assert list(jp_serverapp.extension_manager.extension_apps) == [extension_name] # add a stop_extension method for the extension app async def _stop(*args): nonlocal calls calls += 1 for apps in jp_serverapp.extension_manager.extension_apps.values(): for app in apps: if app: app.stop_extension = _stop # call cleanup_extensions, check the logging is correct caplog.clear() run_sync(jp_serverapp.cleanup_extensions()) assert [msg for *_, msg in caplog.record_tuples] == [ "Shutting down 1 extension", '{} | extension app "mockextension" stopping'.format(extension_name), '{} | extension app "mockextension" stopped'.format(extension_name), ] # check the shutdown method was called once assert calls == 1 jupyter_server-1.13.1/jupyter_server/tests/extension/test_config.py000066400000000000000000000026021415445537200260230ustar00rootroot00000000000000import pytest from jupyter_core.paths import jupyter_config_path from jupyter_server.extension.config import ( ExtensionConfigManager, ) # Use ServerApps environment because it monkeypatches # jupyter_core.paths and provides a config directory # that's not cross contaminating the user config directory. 
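# Layout assumed by these tests: each *.json file dropped into the
# jupyter_server_config.d/ directory created below contributes one entry to
# ServerApp.jpserver_extensions, and ExtensionConfigManager merges them all
# when get_jpserver_extensions() is called.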
pytestmark = pytest.mark.usefixtures("jp_environ") @pytest.fixture def configd(jp_env_config_path): """A pathlib.Path object that acts like a jupyter_server_config.d folder.""" configd = jp_env_config_path.joinpath("jupyter_server_config.d") configd.mkdir() return configd ext1_json_config = """\ { "ServerApp": { "jpserver_extensions": { "ext1_config": true } } } """ @pytest.fixture def ext1_config(configd): config = configd.joinpath("ext1_config.json") config.write_text(ext1_json_config) ext2_json_config = """\ { "ServerApp": { "jpserver_extensions": { "ext2_config": false } } } """ @pytest.fixture def ext2_config(configd): config = configd.joinpath("ext2_config.json") config.write_text(ext2_json_config) def test_list_extension_from_configd(ext1_config, ext2_config): manager = ExtensionConfigManager(read_config_path=jupyter_config_path()) extensions = manager.get_jpserver_extensions() assert "ext2_config" in extensions assert "ext1_config" in extensions jupyter_server-1.13.1/jupyter_server/tests/extension/test_entrypoint.py000066400000000000000000000004741415445537200267760ustar00rootroot00000000000000import pytest # All test coroutines will be treated as marked. pytestmark = pytest.mark.script_launch_mode("subprocess") def test_server_extension_list(jp_environ, script_runner): ret = script_runner.run( "jupyter", "server", "extension", "list", ) assert ret.success jupyter_server-1.13.1/jupyter_server/tests/extension/test_handler.py000066400000000000000000000052111415445537200261720ustar00rootroot00000000000000import pytest @pytest.fixture def jp_server_config(jp_template_dir): return { "ServerApp": { "jpserver_extensions": {"jupyter_server.tests.extension.mockextensions": True} }, "MockExtensionApp": {"template_paths": [str(jp_template_dir)]}, } async def test_handler(jp_fetch): r = await jp_fetch("mock", method="GET") assert r.code == 200 assert r.body.decode() == "mock trait" async def test_handler_template(jp_fetch, mock_template): r = await jp_fetch("mock_template", method="GET") assert r.code == 200 @pytest.mark.parametrize( "jp_server_config", [ { "ServerApp": { "jpserver_extensions": {"jupyter_server.tests.extension.mockextensions": True} }, "MockExtensionApp": { # Change a trait in the MockExtensionApp using # the following config value. "mock_trait": "test mock trait" }, } ], ) async def test_handler_setting(jp_fetch, jp_server_config): # Test that the extension trait was picked up by the webapp. r = await jp_fetch("mock", method="GET") assert r.code == 200 assert r.body.decode() == "test mock trait" @pytest.mark.parametrize("jp_argv", (["--MockExtensionApp.mock_trait=test mock trait"],)) async def test_handler_argv(jp_fetch, jp_argv): # Test that the extension trait was picked up by the webapp. r = await jp_fetch("mock", method="GET") assert r.code == 200 assert r.body.decode() == "test mock trait" @pytest.mark.parametrize( "jp_server_config,jp_base_url", [ ( { "ServerApp": { "jpserver_extensions": {"jupyter_server.tests.extension.mockextensions": True}, # Move extension handlers behind a url prefix "base_url": "test_prefix", }, "MockExtensionApp": { # Change a trait in the MockExtensionApp using # the following config value. 
"mock_trait": "test mock trait" }, }, "/test_prefix/", ) ], ) async def test_base_url(jp_fetch, jp_server_config, jp_base_url): # Test that the extension's handlers were properly prefixed r = await jp_fetch("mock", method="GET") assert r.code == 200 assert r.body.decode() == "test mock trait" # Test that the static namespace was prefixed by base_url r = await jp_fetch("static", "mockextension", "mock.txt", method="GET") assert r.code == 200 body = r.body.decode() assert "mock static content" in body jupyter_server-1.13.1/jupyter_server/tests/extension/test_launch.py000066400000000000000000000047471415445537200260440ustar00rootroot00000000000000"""Test launching Jupyter Server Applications through as ExtensionApp launch_instance. """ import os import subprocess import sys import time from binascii import hexlify from pathlib import Path import pytest import requests HERE = os.path.dirname(os.path.abspath(__file__)) @pytest.fixture def port(): return 9999 @pytest.fixture def token(): return hexlify(os.urandom(4)).decode("ascii") @pytest.fixture def auth_header(token): return {"Authorization": "token %s" % token} def wait_up(url, interval=0.1, check=None): while True: try: r = requests.get(url) except Exception: if check: assert check() # print("waiting for %s" % url) time.sleep(interval) else: break @pytest.fixture def launch_instance(request, port, token): def _run_in_subprocess(argv=[], add_token=True): def _kill_extension_app(): try: process.terminate() except OSError: # Already dead. pass process.wait(10) if add_token: f'--ServerApp.token="{token}"', process = subprocess.Popen( [ sys.executable, "-m", "mockextensions.app", f"--port={port}", "--ip=127.0.0.1", "--no-browser", *argv, ], cwd=HERE, ) request.addfinalizer(_kill_extension_app) url = f"http://127.0.0.1:{port}" wait_up(url, check=lambda: process.poll() is None) return process return _run_in_subprocess @pytest.fixture def fetch(port, auth_header): def _get(endpoint): url = f"http://127.0.0.1:{port}" + endpoint return requests.get(url, headers=auth_header) return _get def test_launch_instance(launch_instance, fetch): launch_instance() r = fetch("/mock") assert r.status_code == 200 def test_base_url(launch_instance, fetch): launch_instance(["--ServerApp.base_url=/foo"]) r = fetch("/foo/mock") assert r.status_code == 200 def test_token_file(launch_instance, fetch, token): token_file = HERE / Path("token_file.txt") os.environ["JUPYTER_TOKEN_FILE"] = str(token_file) token_file.write_text(token, encoding="utf-8") launch_instance(add_token=False) r = fetch("/mock") del os.environ["JUPYTER_TOKEN_FILE"] token_file.unlink() assert r.status_code == 200 jupyter_server-1.13.1/jupyter_server/tests/extension/test_manager.py000066400000000000000000000113741415445537200261760ustar00rootroot00000000000000import os import unittest.mock as mock import pytest from jupyter_core.paths import jupyter_config_path from jupyter_server.extension.manager import ExtensionManager from jupyter_server.extension.manager import ExtensionMetadataError from jupyter_server.extension.manager import ExtensionModuleNotFound from jupyter_server.extension.manager import ExtensionPackage from jupyter_server.extension.manager import ExtensionPoint # Use ServerApps environment because it monkeypatches # jupyter_core.paths and provides a config directory # that's not cross contaminating the user config directory. 
pytestmark = pytest.mark.usefixtures("jp_environ") def test_extension_point_api(): # Import mock extension metadata from .mockextensions import _jupyter_server_extension_points # Testing the first path (which is an extension app). metadata_list = _jupyter_server_extension_points() point = metadata_list[0] module = point["module"] app = point["app"] e = ExtensionPoint(metadata=point) assert e.module_name == module assert e.name == app.name assert app is not None assert callable(e.load) assert callable(e.link) assert e.validate() def test_extension_point_metadata_error(): # Missing the "module" key. bad_metadata = {"name": "nonexistent"} with pytest.raises(ExtensionMetadataError): ExtensionPoint(metadata=bad_metadata) def test_extension_point_notfound_error(): bad_metadata = {"module": "nonexistent"} with pytest.raises(ExtensionModuleNotFound): ExtensionPoint(metadata=bad_metadata) def test_extension_package_api(): # Import mock extension metadata from .mockextensions import _jupyter_server_extension_points # Testing the first path (which is an extension app). metadata_list = _jupyter_server_extension_points() path1 = metadata_list[0] app = path1["app"] e = ExtensionPackage(name="jupyter_server.tests.extension.mockextensions") e.extension_points assert hasattr(e, "extension_points") assert len(e.extension_points) == len(metadata_list) assert app.name in e.extension_points assert e.validate() def test_extension_package_notfound_error(): with pytest.raises(ExtensionModuleNotFound): ExtensionPackage(name="nonexistent") def _normalize_path(path_list): return [p.rstrip(os.path.sep) for p in path_list] def test_extension_manager_api(jp_serverapp): jpserver_extensions = {"jupyter_server.tests.extension.mockextensions": True} manager = ExtensionManager(serverapp=jp_serverapp) assert manager.config_manager expected = _normalize_path(os.path.join(jupyter_config_path()[0], "serverconfig")) assert _normalize_path(manager.config_manager.read_config_path[0]) == expected manager.from_jpserver_extensions(jpserver_extensions) assert len(manager.extensions) == 1 assert "jupyter_server.tests.extension.mockextensions" in manager.extensions def test_extension_manager_linked_extensions(jp_serverapp): name = "jupyter_server.tests.extension.mockextensions" manager = ExtensionManager(serverapp=jp_serverapp) manager.add_extension(name, enabled=True) manager.link_extension(name) assert name in manager.linked_extensions def test_extension_manager_fail_add(jp_serverapp): name = "jupyter_server.tests.extension.notanextension" manager = ExtensionManager(serverapp=jp_serverapp) manager.add_extension(name, enabled=True) # should only warn jp_serverapp.reraise_server_extension_failures = True with pytest.raises(ExtensionModuleNotFound): manager.add_extension(name, enabled=True) def test_extension_manager_fail_link(jp_serverapp): name = "jupyter_server.tests.extension.mockextensions.app" with mock.patch( "jupyter_server.tests.extension.mockextensions.app.MockExtensionApp.parse_command_line", side_effect=RuntimeError, ): manager = ExtensionManager(serverapp=jp_serverapp) manager.add_extension(name, enabled=True) manager.link_extension(name) # should only warn jp_serverapp.reraise_server_extension_failures = True with pytest.raises(RuntimeError): manager.link_extension(name) def test_extension_manager_fail_load(jp_serverapp): name = "jupyter_server.tests.extension.mockextensions.app" with mock.patch( "jupyter_server.tests.extension.mockextensions.app.MockExtensionApp.initialize_handlers", side_effect=RuntimeError, ): manager = 
ExtensionManager(serverapp=jp_serverapp) manager.add_extension(name, enabled=True) manager.link_extension(name) manager.load_extension(name) # should only warn jp_serverapp.reraise_server_extension_failures = True with pytest.raises(RuntimeError): manager.load_extension(name) jupyter_server-1.13.1/jupyter_server/tests/extension/test_serverextension.py000066400000000000000000000077001415445537200300250ustar00rootroot00000000000000from collections import OrderedDict import pytest from traitlets.tests.utils import check_help_all_output from jupyter_server.config_manager import BaseJSONConfigManager from jupyter_server.extension.serverextension import _get_config_dir from jupyter_server.extension.serverextension import toggle_server_extension_python # Use ServerApps environment because it monkeypatches # jupyter_core.paths and provides a config directory # that's not cross contaminating the user config directory. pytestmark = pytest.mark.usefixtures("jp_environ") def test_help_output(): check_help_all_output("jupyter_server.extension.serverextension") check_help_all_output("jupyter_server.extension.serverextension", ["enable"]) check_help_all_output("jupyter_server.extension.serverextension", ["disable"]) check_help_all_output("jupyter_server.extension.serverextension", ["install"]) check_help_all_output("jupyter_server.extension.serverextension", ["uninstall"]) def get_config(sys_prefix=True): cm = BaseJSONConfigManager(config_dir=_get_config_dir(sys_prefix=sys_prefix)) data = cm.get("jupyter_server_config") return data.get("ServerApp", {}).get("jpserver_extensions", {}) def test_enable(jp_env_config_path, jp_extension_environ): toggle_server_extension_python("mock1", True) config = get_config() assert config["mock1"] def test_disable(jp_env_config_path, jp_extension_environ): toggle_server_extension_python("mock1", True) toggle_server_extension_python("mock1", False) config = get_config() assert not config["mock1"] def test_merge_config(jp_env_config_path, jp_configurable_serverapp, jp_extension_environ): # Toggle each extension module with a JSON config file # at the sys-prefix config dir. toggle_server_extension_python( "jupyter_server.tests.extension.mockextensions.mockext_sys", enabled=True, sys_prefix=True ) toggle_server_extension_python( "jupyter_server.tests.extension.mockextensions.mockext_user", enabled=True, user=True ) # Write this configuration in two places, sys-prefix and user. # sys-prefix supercedes users, so the extension should be disabled # when these two configs merge. toggle_server_extension_python( "jupyter_server.tests.extension.mockextensions.mockext_both", enabled=True, sys_prefix=True ) toggle_server_extension_python( "jupyter_server.tests.extension.mockextensions.mockext_both", enabled=False, user=True ) arg = "--ServerApp.jpserver_extensions={{'{mockext_py}': True}}".format( mockext_py="jupyter_server.tests.extension.mockextensions.mockext_py" ) # Enable the last extension, mockext_py, using the CLI interface. app = jp_configurable_serverapp(config_dir=str(jp_env_config_path), argv=[arg]) # Verify that extensions are enabled and merged in proper order. extensions = app.jpserver_extensions assert extensions["jupyter_server.tests.extension.mockextensions.mockext_user"] assert extensions["jupyter_server.tests.extension.mockextensions.mockext_sys"] assert extensions["jupyter_server.tests.extension.mockextensions.mockext_py"] # Merging should causes this extension to be disabled. 
assert not extensions["jupyter_server.tests.extension.mockextensions.mockext_both"] @pytest.mark.parametrize( "jp_server_config", [ { "ServerApp": { "jpserver_extensions": OrderedDict( [ ("jupyter_server.tests.extension.mockextensions.mock2", True), ("jupyter_server.tests.extension.mockextensions.mock1", True), ] ) } } ], ) def test_load_ordered(jp_serverapp, jp_server_config): assert jp_serverapp.mockII is True, "Mock II should have been loaded" assert jp_serverapp.mockI is True, "Mock I should have been loaded" assert jp_serverapp.mock_shared == "II", "Mock II should be loaded after Mock I" jupyter_server-1.13.1/jupyter_server/tests/extension/test_utils.py000066400000000000000000000014471415445537200257240ustar00rootroot00000000000000import pytest from jupyter_server.extension.utils import validate_extension # Use ServerApps environment because it monkeypatches # jupyter_core.paths and provides a config directory # that's not cross contaminating the user config directory. pytestmark = pytest.mark.usefixtures("jp_environ") def test_validate_extension(): # enabled at sys level assert validate_extension("jupyter_server.tests.extension.mockextensions.mockext_sys") # enabled at sys, disabled at user assert validate_extension("jupyter_server.tests.extension.mockextensions.mockext_both") # enabled at user assert validate_extension("jupyter_server.tests.extension.mockextensions.mockext_user") # enabled at Python assert validate_extension("jupyter_server.tests.extension.mockextensions.mockext_py") jupyter_server-1.13.1/jupyter_server/tests/namespace-package-test/000077500000000000000000000000001415445537200254335ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/tests/namespace-package-test/README.md000066400000000000000000000001271415445537200267120ustar00rootroot00000000000000Blank namespace package for use in testing. 
https://www.python.org/dev/peps/pep-0420/ jupyter_server-1.13.1/jupyter_server/tests/namespace-package-test/setup.cfg000066400000000000000000000001171415445537200272530ustar00rootroot00000000000000[metadata] name = namespace-package-test [options] packages = find_namespace: jupyter_server-1.13.1/jupyter_server/tests/namespace-package-test/test_namespace/000077500000000000000000000000001415445537200304265ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/tests/namespace-package-test/test_namespace/test_package/000077500000000000000000000000001415445537200330605ustar00rootroot00000000000000__init__.py000066400000000000000000000000001415445537200351000ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/tests/namespace-package-test/test_namespace/test_packagejupyter_server-1.13.1/jupyter_server/tests/nbconvert/000077500000000000000000000000001415445537200231315ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/tests/nbconvert/__init__.py000066400000000000000000000000001415445537200252300ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/tests/nbconvert/test_handlers.py000066400000000000000000000077301415445537200263510ustar00rootroot00000000000000# coding: utf-8 import json from base64 import encodebytes from shutil import which import pytest import tornado from nbformat import writes from nbformat.v4 import new_code_cell from nbformat.v4 import new_markdown_cell from nbformat.v4 import new_notebook from nbformat.v4 import new_output from ..utils import expected_http_error png_green_pixel = encodebytes( b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00" b"\x00\x00\x01\x00\x00x00\x01\x08\x02\x00\x00\x00\x90wS\xde\x00\x00\x00\x0cIDAT" b"\x08\xd7c\x90\xfb\xcf\x00\x00\x02\\\x01\x1e.~d\x87\x00\x00\x00\x00IEND\xaeB`\x82" ).decode("ascii") @pytest.fixture def notebook(jp_root_dir): # Build sub directory. subdir = jp_root_dir / "foo" if not jp_root_dir.joinpath("foo").is_dir(): subdir.mkdir() # Build a notebook programmatically. nb = new_notebook() nb.cells.append(new_markdown_cell(u"Created by test ³")) cc1 = new_code_cell(source=u"print(2*6)") cc1.outputs.append(new_output(output_type="stream", text=u"12")) cc1.outputs.append( new_output( output_type="execute_result", data={"image/png": png_green_pixel}, execution_count=1, ) ) nb.cells.append(cc1) # Write file to tmp dir. 
nbfile = subdir / "testnb.ipynb" nbfile.write_text(writes(nb, version=4), encoding="utf-8") pytestmark = pytest.mark.skipif(not which("pandoc"), reason="Command 'pandoc' is not available") async def test_from_file(jp_fetch, notebook): r = await jp_fetch( "nbconvert", "html", "foo", "testnb.ipynb", method="GET", params={"download": False} ) assert r.code == 200 assert "text/html" in r.headers["Content-Type"] assert "Created by test" in r.body.decode() assert "print" in r.body.decode() r = await jp_fetch( "nbconvert", "python", "foo", "testnb.ipynb", method="GET", params={"download": False} ) assert r.code == 200 assert "text/x-python" in r.headers["Content-Type"] assert "print(2*6)" in r.body.decode() async def test_from_file_404(jp_fetch, notebook): with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch( "nbconvert", "html", "foo", "thisdoesntexist.ipynb", method="GET", params={"download": False}, ) assert expected_http_error(e, 404) async def test_from_file_download(jp_fetch, notebook): r = await jp_fetch( "nbconvert", "python", "foo", "testnb.ipynb", method="GET", params={"download": True} ) content_disposition = r.headers["Content-Disposition"] assert "attachment" in content_disposition assert "testnb.py" in content_disposition async def test_from_file_zip(jp_fetch, notebook): r = await jp_fetch( "nbconvert", "latex", "foo", "testnb.ipynb", method="GET", params={"download": True} ) assert "application/zip" in r.headers["Content-Type"] assert ".zip" in r.headers["Content-Disposition"] async def test_from_post(jp_fetch, notebook): r = await jp_fetch( "api/contents/foo/testnb.ipynb", method="GET", ) nbmodel = json.loads(r.body.decode()) r = await jp_fetch("nbconvert", "html", method="POST", body=json.dumps(nbmodel)) assert r.code == 200 assert "text/html" in r.headers["Content-Type"] assert "Created by test" in r.body.decode() assert "print" in r.body.decode() r = await jp_fetch("nbconvert", "python", method="POST", body=json.dumps(nbmodel)) assert r.code == 200 assert u"text/x-python" in r.headers["Content-Type"] assert "print(2*6)" in r.body.decode() async def test_from_post_zip(jp_fetch, notebook): r = await jp_fetch( "api/contents/foo/testnb.ipynb", method="GET", ) nbmodel = json.loads(r.body.decode()) r = await jp_fetch("nbconvert", "latex", method="POST", body=json.dumps(nbmodel)) assert "application/zip" in r.headers["Content-Type"] assert ".zip" in r.headers["Content-Disposition"] jupyter_server-1.13.1/jupyter_server/tests/services/000077500000000000000000000000001415445537200227545ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/tests/services/__init__.py000066400000000000000000000000001415445537200250530ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/tests/services/api/000077500000000000000000000000001415445537200235255ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/tests/services/api/__init__.py000066400000000000000000000000001415445537200256240ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/tests/services/api/test_api.py000066400000000000000000000012671415445537200257150ustar00rootroot00000000000000import json async def test_get_spec(jp_fetch): response = await jp_fetch("api", "spec.yaml", method="GET") assert response.code == 200 async def test_get_status(jp_fetch): response = await jp_fetch("api", "status", method="GET") assert response.code == 200 assert response.headers.get("Content-Type") == "application/json" status = json.loads(response.body.decode("utf8")) assert 
sorted(status.keys()) == [ "connections", "kernels", "last_activity", "started", ] assert status["connections"] == 0 assert status["kernels"] == 0 assert status["last_activity"].endswith("Z") assert status["started"].endswith("Z") jupyter_server-1.13.1/jupyter_server/tests/services/config/000077500000000000000000000000001415445537200242215ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/tests/services/config/__init__.py000066400000000000000000000000001415445537200263200ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/tests/services/config/test_api.py000066400000000000000000000025321415445537200264050ustar00rootroot00000000000000import json async def test_create_retrieve_config(jp_fetch): sample = {"foo": "bar", "baz": 73} response = await jp_fetch("api", "config", "example", method="PUT", body=json.dumps(sample)) assert response.code == 204 response2 = await jp_fetch( "api", "config", "example", method="GET", ) assert response2.code == 200 assert json.loads(response2.body.decode()) == sample async def test_modify(jp_fetch): sample = {"foo": "bar", "baz": 73, "sub": {"a": 6, "b": 7}, "sub2": {"c": 8}} modified_sample = { "foo": None, # should delete foo "baz": 75, "wib": [1, 2, 3], "sub": {"a": 8, "b": None, "d": 9}, "sub2": {"c": None}, # should delete sub2 } diff = {"baz": 75, "wib": [1, 2, 3], "sub": {"a": 8, "d": 9}} await jp_fetch("api", "config", "example", method="PUT", body=json.dumps(sample)) response2 = await jp_fetch( "api", "config", "example", method="PATCH", body=json.dumps(modified_sample) ) assert response2.code == 200 assert json.loads(response2.body.decode()) == diff async def test_get_unknown(jp_fetch): response = await jp_fetch( "api", "config", "nonexistant", method="GET", ) assert response.code == 200 assert json.loads(response.body.decode()) == {} jupyter_server-1.13.1/jupyter_server/tests/services/contents/000077500000000000000000000000001415445537200246115ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/tests/services/contents/__init__.py000066400000000000000000000000001415445537200267100ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/tests/services/contents/test_api.py000066400000000000000000000571471415445537200270110ustar00rootroot00000000000000import json import pathlib import sys from base64 import decodebytes from base64 import encodebytes from unicodedata import normalize import pytest import tornado from nbformat import from_dict from nbformat import writes from nbformat.v4 import new_markdown_cell from nbformat.v4 import new_notebook from ...utils import expected_http_error from jupyter_server.utils import url_path_join def notebooks_only(dir_model): return [nb for nb in dir_model["content"] if nb["type"] == "notebook"] def dirs_only(dir_model): return [x for x in dir_model["content"] if x["type"] == "directory"] dirs = [ ("", "inroot"), ("Directory with spaces in", "inspace"), (u"unicodé", "innonascii"), ("foo", "a"), ("foo", "b"), ("foo", "name with spaces"), ("foo", u"unicodé"), ("foo/bar", "baz"), ("ordering", "A"), ("ordering", "b"), ("ordering", "C"), (u"å b", u"ç d"), ] @pytest.fixture(params=["FileContentsManager", "AsyncFileContentsManager"]) def jp_argv(request): return [ "--ServerApp.contents_manager_class=jupyter_server.services.contents.filemanager." 
+ request.param ] @pytest.fixture def contents_dir(tmp_path, jp_serverapp): return tmp_path / jp_serverapp.root_dir @pytest.fixture def contents(contents_dir): # Create files in temporary directory paths = { "notebooks": [], "textfiles": [], "blobs": [], } for d, name in dirs: p = contents_dir / d p.mkdir(parents=True, exist_ok=True) # Create a notebook nb = writes(new_notebook(), version=4) nbname = p.joinpath("{}.ipynb".format(name)) nbname.write_text(nb, encoding="utf-8") paths["notebooks"].append(nbname.relative_to(contents_dir)) # Create a text file txt = "{} text file".format(name) txtname = p.joinpath("{}.txt".format(name)) txtname.write_text(txt, encoding="utf-8") paths["textfiles"].append(txtname.relative_to(contents_dir)) # Create a random blob blob = name.encode("utf-8") + b"\xFF" blobname = p.joinpath("{}.blob".format(name)) blobname.write_bytes(blob) paths["blobs"].append(blobname.relative_to(contents_dir)) paths["all"] = list(paths.values()) return paths @pytest.fixture def folders(): return list(set(item[0] for item in dirs)) @pytest.mark.parametrize("path,name", dirs) async def test_list_notebooks(jp_fetch, contents, path, name): response = await jp_fetch( "api", "contents", path, method="GET", ) data = json.loads(response.body.decode()) nbs = notebooks_only(data) assert len(nbs) > 0 assert name + ".ipynb" in [normalize("NFC", n["name"]) for n in nbs] assert url_path_join(path, name + ".ipynb") in [normalize("NFC", n["path"]) for n in nbs] @pytest.mark.parametrize("path,name", dirs) async def test_get_dir_no_contents(jp_fetch, contents, path, name): response = await jp_fetch( "api", "contents", path, method="GET", params=dict( content="0", ), ) model = json.loads(response.body.decode()) assert model["path"] == path assert model["type"] == "directory" assert "content" in model assert model["content"] is None async def test_list_nonexistant_dir(jp_fetch, contents): with pytest.raises(tornado.httpclient.HTTPClientError): await jp_fetch( "api", "contents", "nonexistant", method="GET", ) @pytest.mark.parametrize("path,name", dirs) async def test_get_nb_contents(jp_fetch, contents, path, name): nbname = name + ".ipynb" nbpath = (path + "/" + nbname).lstrip("/") r = await jp_fetch("api", "contents", nbpath, method="GET", params=dict(content="1")) model = json.loads(r.body.decode()) assert model["name"] == nbname assert model["path"] == nbpath assert model["type"] == "notebook" assert "content" in model assert model["format"] == "json" assert "metadata" in model["content"] assert isinstance(model["content"]["metadata"], dict) @pytest.mark.parametrize("path,name", dirs) async def test_get_nb_no_contents(jp_fetch, contents, path, name): nbname = name + ".ipynb" nbpath = (path + "/" + nbname).lstrip("/") r = await jp_fetch("api", "contents", nbpath, method="GET", params=dict(content="0")) model = json.loads(r.body.decode()) assert model["name"] == nbname assert model["path"] == nbpath assert model["type"] == "notebook" assert "content" in model assert model["content"] is None async def test_get_nb_invalid(contents_dir, jp_fetch, contents): nb = { "nbformat": 4, "metadata": {}, "cells": [ { "cell_type": "wrong", "metadata": {}, } ], } nbpath = u"å b/Validate tést.ipynb" (contents_dir / nbpath).write_text(json.dumps(nb)) r = await jp_fetch( "api", "contents", nbpath, method="GET", ) model = json.loads(r.body.decode()) assert model["path"] == nbpath assert model["type"] == "notebook" assert "content" in model assert "message" in model assert "validation failed" in 
model["message"].lower() async def test_get_contents_no_such_file(jp_fetch): with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch( "api", "contents", "foo/q.ipynb", method="GET", ) assert e.value.code == 404 @pytest.mark.parametrize("path,name", dirs) async def test_get_text_file_contents(jp_fetch, contents, path, name): txtname = name + ".txt" txtpath = (path + "/" + txtname).lstrip("/") r = await jp_fetch("api", "contents", txtpath, method="GET", params=dict(content="1")) model = json.loads(r.body.decode()) assert model["name"] == txtname assert model["path"] == txtpath assert "content" in model assert model["format"] == "text" assert model["type"] == "file" assert model["content"] == "{} text file".format(name) with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch( "api", "contents", "foo/q.txt", method="GET", ) assert expected_http_error(e, 404) with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch( "api", "contents", "foo/bar/baz.blob", method="GET", params=dict(type="file", format="text"), ) assert expected_http_error(e, 400) @pytest.mark.parametrize("path,name", dirs) async def test_get_binary_file_contents(jp_fetch, contents, path, name): blobname = name + ".blob" blobpath = (path + "/" + blobname).lstrip("/") r = await jp_fetch("api", "contents", blobpath, method="GET", params=dict(content="1")) model = json.loads(r.body.decode()) assert model["name"] == blobname assert model["path"] == blobpath assert "content" in model assert model["format"] == "base64" assert model["type"] == "file" data_out = decodebytes(model["content"].encode("ascii")) data_in = name.encode("utf-8") + b"\xFF" assert data_in == data_out with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch( "api", "contents", "foo/q.txt", method="GET", ) assert expected_http_error(e, 404) async def test_get_bad_type(jp_fetch, contents): with pytest.raises(tornado.httpclient.HTTPClientError) as e: path = "unicodé" type = "file" await jp_fetch( "api", "contents", path, method="GET", params=dict(type=type), # This should be a directory, and thus throw and error ) assert expected_http_error(e, 400, "%s is a directory, not a %s" % (path, type)) with pytest.raises(tornado.httpclient.HTTPClientError) as e: path = "unicodé/innonascii.ipynb" type = "directory" await jp_fetch( "api", "contents", path, method="GET", params=dict(type=type), # This should be a file, and thus throw and error ) assert expected_http_error(e, 400, "%s is not a directory" % path) @pytest.fixture def _check_created(jp_base_url): def _inner(r, contents_dir, path, name, type="notebook"): fpath = path + "/" + name assert r.code == 201 location = jp_base_url + "api/contents/" + tornado.escape.url_escape(fpath, plus=False) assert r.headers["Location"] == location model = json.loads(r.body.decode()) assert model["name"] == name assert model["path"] == fpath assert model["type"] == type path = contents_dir + "/" + fpath if type == "directory": assert pathlib.Path(path).is_dir() else: assert pathlib.Path(path).is_file() return _inner async def test_create_untitled(jp_fetch, contents, contents_dir, _check_created): path = "å b" name = "Untitled.ipynb" r = await jp_fetch("api", "contents", path, method="POST", body=json.dumps({"ext": ".ipynb"})) _check_created(r, str(contents_dir), path, name, type="notebook") name = "Untitled1.ipynb" r = await jp_fetch("api", "contents", path, method="POST", body=json.dumps({"ext": ".ipynb"})) _check_created(r, str(contents_dir), path, name, 
type="notebook") path = "foo/bar" name = "Untitled.ipynb" r = await jp_fetch("api", "contents", path, method="POST", body=json.dumps({"ext": ".ipynb"})) _check_created(r, str(contents_dir), path, name, type="notebook") async def test_create_untitled_txt(jp_fetch, contents, contents_dir, _check_created): name = "untitled.txt" path = "foo/bar" r = await jp_fetch("api", "contents", path, method="POST", body=json.dumps({"ext": ".txt"})) _check_created(r, str(contents_dir), path, name, type="file") r = await jp_fetch("api", "contents", path, name, method="GET") model = json.loads(r.body.decode()) assert model["type"] == "file" assert model["format"] == "text" assert model["content"] == "" async def test_upload(jp_fetch, contents, contents_dir, _check_created): nb = new_notebook() nbmodel = {"content": nb, "type": "notebook"} path = "å b" name = "Upload tést.ipynb" r = await jp_fetch("api", "contents", path, name, method="PUT", body=json.dumps(nbmodel)) _check_created(r, str(contents_dir), path, name) async def test_mkdir_untitled(jp_fetch, contents, contents_dir, _check_created): name = "Untitled Folder" path = "å b" r = await jp_fetch( "api", "contents", path, method="POST", body=json.dumps({"type": "directory"}) ) _check_created(r, str(contents_dir), path, name, type="directory") name = "Untitled Folder 1" r = await jp_fetch( "api", "contents", path, method="POST", body=json.dumps({"type": "directory"}) ) _check_created(r, str(contents_dir), path, name, type="directory") name = "Untitled Folder" path = "foo/bar" r = await jp_fetch( "api", "contents", path, method="POST", body=json.dumps({"type": "directory"}) ) _check_created(r, str(contents_dir), path, name, type="directory") async def test_mkdir(jp_fetch, contents, contents_dir, _check_created): name = "New ∂ir" path = "å b" r = await jp_fetch( "api", "contents", path, name, method="PUT", body=json.dumps({"type": "directory"}) ) _check_created(r, str(contents_dir), path, name, type="directory") async def test_mkdir_hidden_400(jp_fetch): with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch( "api", "contents", "å b/.hidden", method="PUT", body=json.dumps({"type": "directory"}) ) assert expected_http_error(e, 400) async def test_upload_txt(jp_fetch, contents, contents_dir, _check_created): body = "ünicode téxt" model = { "content": body, "format": "text", "type": "file", } path = "å b" name = "Upload tést.txt" await jp_fetch("api", "contents", path, name, method="PUT", body=json.dumps(model)) # check roundtrip r = await jp_fetch("api", "contents", path, name, method="GET") model = json.loads(r.body.decode()) assert model["type"] == "file" assert model["format"] == "text" assert model["path"] == path + "/" + name assert model["content"] == body async def test_upload_b64(jp_fetch, contents, contents_dir, _check_created): body = b"\xFFblob" b64body = encodebytes(body).decode("ascii") model = { "content": b64body, "format": "base64", "type": "file", } path = "å b" name = "Upload tést.blob" await jp_fetch("api", "contents", path, name, method="PUT", body=json.dumps(model)) # check roundtrip r = await jp_fetch("api", "contents", path, name, method="GET") model = json.loads(r.body.decode()) assert model["type"] == "file" assert model["path"] == path + "/" + name assert model["format"] == "base64" decoded = decodebytes(model["content"].encode("ascii")) assert decoded == body async def test_copy(jp_fetch, contents, contents_dir, _check_created): path = "å b" name = "ç d.ipynb" copy = "ç d-Copy1.ipynb" r = await jp_fetch( "api", 
"contents", path, method="POST", body=json.dumps({"copy_from": path + "/" + name}) ) _check_created(r, str(contents_dir), path, copy, type="notebook") # Copy the same file name copy2 = "ç d-Copy2.ipynb" r = await jp_fetch( "api", "contents", path, method="POST", body=json.dumps({"copy_from": path + "/" + name}) ) _check_created(r, str(contents_dir), path, copy2, type="notebook") # copy a copy. copy3 = "ç d-Copy3.ipynb" r = await jp_fetch( "api", "contents", path, method="POST", body=json.dumps({"copy_from": path + "/" + copy2}) ) _check_created(r, str(contents_dir), path, copy3, type="notebook") async def test_copy_path(jp_fetch, contents, contents_dir, _check_created): path1 = "foo" path2 = "å b" name = "a.ipynb" copy = "a-Copy1.ipynb" r = await jp_fetch( "api", "contents", path2, method="POST", body=json.dumps({"copy_from": path1 + "/" + name}) ) _check_created(r, str(contents_dir), path2, name, type="notebook") r = await jp_fetch( "api", "contents", path2, method="POST", body=json.dumps({"copy_from": path1 + "/" + name}) ) _check_created(r, str(contents_dir), path2, copy, type="notebook") async def test_copy_put_400(jp_fetch, contents, contents_dir, _check_created): with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch( "api", "contents", "å b/cøpy.ipynb", method="PUT", body=json.dumps({"copy_from": "å b/ç d.ipynb"}), ) assert expected_http_error(e, 400) async def test_copy_dir_400(jp_fetch, contents, contents_dir, _check_created): with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch( "api", "contents", "foo", method="POST", body=json.dumps({"copy_from": "å b"}) ) assert expected_http_error(e, 400) @pytest.mark.parametrize("path,name", dirs) async def test_delete(jp_fetch, contents, contents_dir, path, name, _check_created): nbname = name + ".ipynb" nbpath = (path + "/" + nbname).lstrip("/") r = await jp_fetch( "api", "contents", nbpath, method="DELETE", ) assert r.code == 204 async def test_delete_dirs(jp_fetch, contents, folders): # Iterate over folders for name in sorted(folders + ["/"], key=len, reverse=True): r = await jp_fetch("api", "contents", name, method="GET") # Get JSON blobs for each content. listing = json.loads(r.body.decode())["content"] # Delete all content for model in listing: await jp_fetch("api", "contents", model["path"], method="DELETE") # Make sure all content has been deleted. r = await jp_fetch("api", "contents", method="GET") model = json.loads(r.body.decode()) assert model["content"] == [] @pytest.mark.skipif(sys.platform == "win32", reason="Disabled deleting non-empty dirs on Windows") async def test_delete_non_empty_dir(jp_fetch, contents): # Delete a folder await jp_fetch("api", "contents", "å b", method="DELETE") # Check that the folder was been deleted. 
with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch("api", "contents", "å b", method="GET") assert expected_http_error(e, 404) async def test_rename(jp_fetch, jp_base_url, contents, contents_dir): path = "foo" name = "a.ipynb" new_name = "z.ipynb" # Rename the file r = await jp_fetch( "api", "contents", path, name, method="PATCH", body=json.dumps({"path": path + "/" + new_name}), ) fpath = path + "/" + new_name assert r.code == 200 location = url_path_join(jp_base_url, "api/contents/", fpath) assert r.headers["Location"] == location model = json.loads(r.body.decode()) assert model["name"] == new_name assert model["path"] == fpath fpath = str(contents_dir / fpath) assert pathlib.Path(fpath).is_file() # Check that the files have changed r = await jp_fetch("api", "contents", path, method="GET") listing = json.loads(r.body.decode()) nbnames = [name["name"] for name in listing["content"]] assert "z.ipynb" in nbnames assert "a.ipynb" not in nbnames async def test_checkpoints_follow_file(jp_fetch, contents): path = "foo" name = "a.ipynb" # Read initial file. r = await jp_fetch("api", "contents", path, name, method="GET") model = json.loads(r.body.decode()) # Create a checkpoint of initial state r = await jp_fetch( "api", "contents", path, name, "checkpoints", method="POST", allow_nonstandard_methods=True ) cp1 = json.loads(r.body.decode()) # Modify file and save. nbcontent = model["content"] nb = from_dict(nbcontent) hcell = new_markdown_cell("Created by test") nb.cells.append(hcell) nbmodel = {"content": nb, "type": "notebook"} await jp_fetch("api", "contents", path, name, method="PUT", body=json.dumps(nbmodel)) # List checkpoints r = await jp_fetch( "api", "contents", path, name, "checkpoints", method="GET", ) cps = json.loads(r.body.decode()) assert cps == [cp1] r = await jp_fetch("api", "contents", path, name, method="GET") model = json.loads(r.body.decode()) nbcontent = model["content"] nb = from_dict(nbcontent) assert nb.cells[0].source == "Created by test" async def test_rename_existing(jp_fetch, contents): with pytest.raises(tornado.httpclient.HTTPClientError) as e: path = "foo" name = "a.ipynb" new_name = "b.ipynb" # Rename the file await jp_fetch( "api", "contents", path, name, method="PATCH", body=json.dumps({"path": path + "/" + new_name}), ) assert expected_http_error(e, 409) async def test_save(jp_fetch, contents): r = await jp_fetch("api", "contents", "foo/a.ipynb", method="GET") model = json.loads(r.body.decode()) nbmodel = model["content"] nb = from_dict(nbmodel) nb.cells.append(new_markdown_cell("Created by test ³")) nbmodel = {"content": nb, "type": "notebook"} await jp_fetch("api", "contents", "foo/a.ipynb", method="PUT", body=json.dumps(nbmodel)) # Round trip. r = await jp_fetch("api", "contents", "foo/a.ipynb", method="GET") model = json.loads(r.body.decode()) newnb = from_dict(model["content"]) assert newnb.cells[0].source == "Created by test ³" async def test_checkpoints(jp_fetch, contents): path = "foo/a.ipynb" resp = await jp_fetch("api", "contents", path, method="GET") model = json.loads(resp.body.decode()) r = await jp_fetch( "api", "contents", path, "checkpoints", method="POST", allow_nonstandard_methods=True ) assert r.code == 201 cp1 = json.loads(r.body.decode()) assert set(cp1) == {"id", "last_modified"} assert r.headers["Location"].split("/")[-1] == cp1["id"] # Modify it. nbcontent = model["content"] nb = from_dict(nbcontent) hcell = new_markdown_cell("Created by test") nb.cells.append(hcell) # Save it. 
nbmodel = {"content": nb, "type": "notebook"} await jp_fetch("api", "contents", path, method="PUT", body=json.dumps(nbmodel)) # List checkpoints r = await jp_fetch("api", "contents", path, "checkpoints", method="GET") cps = json.loads(r.body.decode()) assert cps == [cp1] r = await jp_fetch("api", "contents", path, method="GET") nbcontent = json.loads(r.body.decode())["content"] nb = from_dict(nbcontent) assert nb.cells[0].source == "Created by test" # Restore Checkpoint cp1 r = await jp_fetch( "api", "contents", path, "checkpoints", cp1["id"], method="POST", allow_nonstandard_methods=True, ) assert r.code == 204 r = await jp_fetch("api", "contents", path, method="GET") nbcontent = json.loads(r.body.decode())["content"] nb = from_dict(nbcontent) assert nb.cells == [] # Delete cp1 r = await jp_fetch("api", "contents", path, "checkpoints", cp1["id"], method="DELETE") assert r.code == 204 r = await jp_fetch("api", "contents", path, "checkpoints", method="GET") cps = json.loads(r.body.decode()) assert cps == [] async def test_file_checkpoints(jp_fetch, contents): path = "foo/a.txt" resp = await jp_fetch("api", "contents", path, method="GET") orig_content = json.loads(resp.body.decode())["content"] r = await jp_fetch( "api", "contents", path, "checkpoints", method="POST", allow_nonstandard_methods=True ) assert r.code == 201 cp1 = json.loads(r.body.decode()) assert set(cp1) == {"id", "last_modified"} assert r.headers["Location"].split("/")[-1] == cp1["id"] # Modify it. new_content = orig_content + "\nsecond line" model = { "content": new_content, "type": "file", "format": "text", } # Save it. await jp_fetch("api", "contents", path, method="PUT", body=json.dumps(model)) # List checkpoints r = await jp_fetch("api", "contents", path, "checkpoints", method="GET") cps = json.loads(r.body.decode()) assert cps == [cp1] r = await jp_fetch("api", "contents", path, method="GET") content = json.loads(r.body.decode())["content"] assert content == new_content # Restore Checkpoint cp1 r = await jp_fetch( "api", "contents", path, "checkpoints", cp1["id"], method="POST", allow_nonstandard_methods=True, ) assert r.code == 204 r = await jp_fetch("api", "contents", path, method="GET") restored_content = json.loads(r.body.decode())["content"] assert restored_content == orig_content # Delete cp1 r = await jp_fetch("api", "contents", path, "checkpoints", cp1["id"], method="DELETE") assert r.code == 204 r = await jp_fetch("api", "contents", path, "checkpoints", method="GET") cps = json.loads(r.body.decode()) assert cps == [] async def test_trust(jp_fetch, contents): # It should be able to trust a notebook that exists for path in contents["notebooks"]: r = await jp_fetch( "api", "contents", str(path), "trust", method="POST", allow_nonstandard_methods=True ) assert r.code == 201 jupyter_server-1.13.1/jupyter_server/tests/services/contents/test_config.py000066400000000000000000000021531415445537200274700ustar00rootroot00000000000000import pytest from jupyter_server.services.contents.checkpoints import AsyncCheckpoints from jupyter_server.services.contents.filecheckpoints import AsyncGenericFileCheckpoints from jupyter_server.services.contents.filecheckpoints import GenericFileCheckpoints from jupyter_server.services.contents.manager import AsyncContentsManager @pytest.fixture(params=[AsyncGenericFileCheckpoints, GenericFileCheckpoints]) def jp_server_config(request): return {"FileContentsManager": {"checkpoints_class": request.param}} def test_config_did_something(jp_server_config, jp_serverapp): assert isinstance( 
        jp_serverapp.contents_manager.checkpoints,
        jp_server_config["FileContentsManager"]["checkpoints_class"],
    )


async def test_async_contents_manager(jp_configurable_serverapp):
    config = {"ContentsManager": {"checkpoints_class": AsyncCheckpoints}}
    argv = [
        "--ServerApp.contents_manager_class=jupyter_server.services.contents.manager.AsyncContentsManager"
    ]
    app = jp_configurable_serverapp(config=config, argv=argv)
    assert isinstance(app.contents_manager, AsyncContentsManager)

jupyter_server-1.13.1/jupyter_server/tests/services/contents/test_fileio.py000066400000000000000000000066601415445537200275010ustar00rootroot00000000000000
import functools
import io
import os
import stat
import sys

import decorator
import pytest
from ipython_genutils.testing.decorators import skip_win32 as _skip_win32

from jupyter_server.services.contents.fileio import atomic_writing


@functools.wraps(_skip_win32)
def skip_win32(f):
    # Patches the "skip_win32" method to allow pytest fixtures
    # in methods wrapped by this decorator.
    def inner(f, *args, **kwargs):
        decorated_f = _skip_win32(f)
        return decorated_f(*args, **kwargs)

    return decorator.decorator(inner, f)


umask = 0


def test_atomic_writing(tmp_path):
    class CustomExc(Exception):
        pass

    f1 = tmp_path / "penguin"
    f1.write_text("Before")

    if os.name != "nt":
        os.chmod(str(f1), 0o701)
        orig_mode = stat.S_IMODE(os.stat(str(f1)).st_mode)

    f2 = tmp_path / "flamingo"
    try:
        os.symlink(str(f1), str(f2))
        have_symlink = True
    except (AttributeError, NotImplementedError, OSError):
        # AttributeError: Python doesn't support it
        # NotImplementedError: The system doesn't support it
        # OSError: The user lacks the privilege (Windows)
        have_symlink = False

    with pytest.raises(CustomExc):
        with atomic_writing(str(f1)) as f:
            f.write("Failing write")
            raise CustomExc

    with io.open(str(f1), "r") as f:
        assert f.read() == "Before"

    with atomic_writing(str(f1)) as f:
        f.write("Overwritten")

    with io.open(str(f1), "r") as f:
        assert f.read() == "Overwritten"

    if os.name != "nt":
        mode = stat.S_IMODE(os.stat(str(f1)).st_mode)
        assert mode == orig_mode

    if have_symlink:
        # Check that writing over a file preserves a symlink
        with atomic_writing(str(f2)) as f:
            f.write(u"written from symlink")

        with io.open(str(f1), "r") as f:
            assert f.read() == "written from symlink"


@pytest.fixture
def handle_umask():
    global umask
    umask = os.umask(0)
    os.umask(umask)
    yield
    os.umask(umask)


@pytest.mark.skipif(sys.platform.startswith("win"), reason="Windows")
def test_atomic_writing_umask(handle_umask, tmp_path):
    os.umask(0o022)
    f1 = str(tmp_path / "1")
    with atomic_writing(f1) as f:
        f.write("1")
    mode = stat.S_IMODE(os.stat(f1).st_mode)
    assert mode == 0o644

    os.umask(0o057)
    f2 = str(tmp_path / "2")
    with atomic_writing(f2) as f:
        f.write("2")
    mode = stat.S_IMODE(os.stat(f2).st_mode)
    assert mode == 0o620


def test_atomic_writing_newlines(tmp_path):
    path = str(tmp_path / "testfile")

    lf = u"a\nb\nc\n"
    plat = lf.replace(u"\n", os.linesep)
    crlf = lf.replace(u"\n", u"\r\n")

    # test default
    with io.open(path, "w") as f:
        f.write(lf)
    with io.open(path, "r", newline="") as f:
        read = f.read()
    assert read == plat

    # test newline=LF
    with io.open(path, "w", newline="\n") as f:
        f.write(lf)
    with io.open(path, "r", newline="") as f:
        read = f.read()
    assert read == lf

    # test newline=CRLF
    with atomic_writing(str(path), newline="\r\n") as f:
        f.write(lf)
    with io.open(path, "r", newline="") as f:
        read = f.read()
    assert read == crlf

    # test newline=no convert
    text = u"crlf\r\ncr\rlf\n"
    with atomic_writing(str(path), newline="") as f:
        f.write(text)
    with io.open(path, "r", newline="") as f:
        read = f.read()
    assert read == text

jupyter_server-1.13.1/jupyter_server/tests/services/contents/test_largefilemanager.py000066400000000000000000000071131415445537200315110ustar00rootroot00000000000000
import pytest
import tornado

from ...utils import expected_http_error
from jupyter_server.services.contents.largefilemanager import AsyncLargeFileManager
from jupyter_server.services.contents.largefilemanager import LargeFileManager
from jupyter_server.utils import ensure_async


@pytest.fixture(params=[LargeFileManager, AsyncLargeFileManager])
def jp_large_contents_manager(request, tmp_path):
    """Returns a LargeFileManager instance."""
    file_manager = request.param
    return file_manager(root_dir=str(tmp_path))


async def test_save(jp_large_contents_manager):
    cm = jp_large_contents_manager
    model = await ensure_async(cm.new_untitled(type="notebook"))
    name = model["name"]
    path = model["path"]

    # Get the model with 'content'
    full_model = await ensure_async(cm.get(path))

    # Save the notebook
    model = await ensure_async(cm.save(full_model, path))
    assert isinstance(model, dict)
    assert "name" in model
    assert "path" in model
    assert model["name"] == name
    assert model["path"] == path


@pytest.mark.parametrize(
    "model,err_message",
    [
        (
            {"name": "test", "path": "test", "chunk": 1},
            "HTTP 400: Bad Request (No file type provided)",
        ),
        (
            {"name": "test", "path": "test", "chunk": 1, "type": "notebook"},
            'HTTP 400: Bad Request (File type "notebook" is not supported for large file transfer)',
        ),
        (
            {"name": "test", "path": "test", "chunk": 1, "type": "file"},
            "HTTP 400: Bad Request (No file content provided)",
        ),
        (
            {
                "name": "test",
                "path": "test",
                "chunk": 2,
                "type": "file",
                "content": u"test",
                "format": "json",
            },
            "HTTP 400: Bad Request (Must specify format of file contents as 'text' or 'base64')",
        ),
    ],
)
async def test_bad_save(jp_large_contents_manager, model, err_message):
    with pytest.raises(tornado.web.HTTPError) as e:
        await ensure_async(jp_large_contents_manager.save(model, model["path"]))
    assert expected_http_error(e, 400, expected_message=err_message)


async def test_saving_different_chunks(jp_large_contents_manager):
    cm = jp_large_contents_manager
    model = {"name": "test", "path": "test", "type": "file", "content": u"test==", "format": "text"}
    name = model["name"]
    path = model["path"]
    await ensure_async(cm.save(model, path))

    for chunk in (1, 2, -1):
        for fm in ("text", "base64"):
            full_model = await ensure_async(cm.get(path))
            full_model["chunk"] = chunk
            full_model["format"] = fm
            model_res = await ensure_async(cm.save(full_model, path))
            assert isinstance(model_res, dict)
            assert "name" in model_res
            assert "path" in model_res
            assert "chunk" not in model_res
            assert model_res["name"] == name
            assert model_res["path"] == path


async def test_save_in_subdirectory(jp_large_contents_manager, tmp_path):
    cm = jp_large_contents_manager
    sub_dir = tmp_path / "foo"
    sub_dir.mkdir()
    model = await ensure_async(cm.new_untitled(path="/foo/", type="notebook"))
    path = model["path"]
    model = await ensure_async(cm.get(path))

    # Change the name in the model for rename
    model = await ensure_async(cm.save(model, path))
    assert isinstance(model, dict)
    assert "name" in model
    assert "path" in model
    assert model["name"] == "Untitled.ipynb"
    assert model["path"] == "foo/Untitled.ipynb"

jupyter_server-1.13.1/jupyter_server/tests/services/contents/test_manager.py000066400000000000000000000544331415445537200276450ustar00rootroot00000000000000
import os
import sys
import time
from itertools import combinations

import pytest
from nbformat import v4 as nbformat
from tornado.web import HTTPError
from traitlets import TraitError

from ...utils import expected_http_error
from jupyter_server.services.contents.filemanager import AsyncFileContentsManager
from jupyter_server.services.contents.filemanager import FileContentsManager
from jupyter_server.utils import ensure_async


@pytest.fixture(
    params=[
        (FileContentsManager, True),
        (FileContentsManager, False),
        (AsyncFileContentsManager, True),
        (AsyncFileContentsManager, False),
    ]
)
def jp_contents_manager(request, tmp_path):
    contents_manager, use_atomic_writing = request.param
    return contents_manager(root_dir=str(tmp_path), use_atomic_writing=use_atomic_writing)


@pytest.fixture(params=[FileContentsManager, AsyncFileContentsManager])
def jp_file_contents_manager_class(request, tmp_path):
    return request.param


# -------------- Functions ----------------------------


def _make_dir(jp_contents_manager, api_path):
    """
    Make a directory.
    """
    os_path = jp_contents_manager._get_os_path(api_path)
    try:
        os.makedirs(os_path)
    except OSError:
        print("Directory already exists: %r" % os_path)


def symlink(jp_contents_manager, src, dst):
    """Make a symlink to src from dst

    src and dst are api_paths
    """
    src_os_path = jp_contents_manager._get_os_path(src)
    dst_os_path = jp_contents_manager._get_os_path(dst)
    print(src_os_path, dst_os_path, os.path.isfile(src_os_path))
    os.symlink(src_os_path, dst_os_path)


def add_code_cell(notebook):
    output = nbformat.new_output("display_data", {"application/javascript": "alert('hi');"})
    cell = nbformat.new_code_cell("print('hi')", outputs=[output])
    notebook.cells.append(cell)


async def new_notebook(jp_contents_manager):
    cm = jp_contents_manager
    model = await ensure_async(cm.new_untitled(type="notebook"))
    name = model["name"]
    path = model["path"]

    full_model = await ensure_async(cm.get(path))
    nb = full_model["content"]
    nb["metadata"]["counter"] = int(1e6 * time.time())
    add_code_cell(nb)

    await ensure_async(cm.save(full_model, path))
    return nb, name, path


async def make_populated_dir(jp_contents_manager, api_path):
    cm = jp_contents_manager
    _make_dir(cm, api_path)
    await ensure_async(cm.new(path="/".join([api_path, "nb.ipynb"])))
    await ensure_async(cm.new(path="/".join([api_path, "file.txt"])))


async def check_populated_dir_files(jp_contents_manager, api_path):
    dir_model = await ensure_async(jp_contents_manager.get(api_path))

    assert dir_model["path"] == api_path
    assert dir_model["type"] == "directory"

    for entry in dir_model["content"]:
        if entry["type"] == "directory":
            continue
        elif entry["type"] == "file":
            assert entry["name"] == "file.txt"
            complete_path = "/".join([api_path, "file.txt"])
            assert entry["path"] == complete_path
        elif entry["type"] == "notebook":
            assert entry["name"] == "nb.ipynb"
            complete_path = "/".join([api_path, "nb.ipynb"])
            assert entry["path"] == complete_path


# ----------------- Tests ----------------------------------


def test_root_dir(jp_file_contents_manager_class, tmp_path):
    fm = jp_file_contents_manager_class(root_dir=str(tmp_path))
    assert fm.root_dir == str(tmp_path)


def test_missing_root_dir(jp_file_contents_manager_class, tmp_path):
    root = tmp_path / "notebook" / "dir" / "is" / "missing"
    with pytest.raises(TraitError):
        jp_file_contents_manager_class(root_dir=str(root))


def test_invalid_root_dir(jp_file_contents_manager_class, tmp_path):
    temp_file = tmp_path / "file.txt"
    temp_file.write_text("")
    with pytest.raises(TraitError):
        jp_file_contents_manager_class(root_dir=str(temp_file))


def test_get_os_path(jp_file_contents_manager_class, tmp_path):
    fm = jp_file_contents_manager_class(root_dir=str(tmp_path))
    path = fm._get_os_path("/path/to/notebook/test.ipynb")
    rel_path_list = "/path/to/notebook/test.ipynb".split("/")
    fs_path = os.path.join(fm.root_dir, *rel_path_list)
    assert path == fs_path

    fm = jp_file_contents_manager_class(root_dir=str(tmp_path))
    path = fm._get_os_path("test.ipynb")
    fs_path = os.path.join(fm.root_dir, "test.ipynb")
    assert path == fs_path

    fm = jp_file_contents_manager_class(root_dir=str(tmp_path))
    path = fm._get_os_path("////test.ipynb")
    fs_path = os.path.join(fm.root_dir, "test.ipynb")
    assert path == fs_path


def test_checkpoint_subdir(jp_file_contents_manager_class, tmp_path):
    subd = "sub ∂ir"
    cp_name = "test-cp.ipynb"
    fm = jp_file_contents_manager_class(root_dir=str(tmp_path))
    tmp_path.joinpath(subd).mkdir()
    cpm = fm.checkpoints
    cp_dir = cpm.checkpoint_path("cp", "test.ipynb")
    cp_subdir = cpm.checkpoint_path("cp", "/%s/test.ipynb" % subd)
    assert cp_dir != cp_subdir
    assert cp_dir == os.path.join(str(tmp_path), cpm.checkpoint_dir, cp_name)


async def test_bad_symlink(jp_file_contents_manager_class, tmp_path):
    td = str(tmp_path)

    cm = jp_file_contents_manager_class(root_dir=td)
    path = "test bad symlink"
    _make_dir(cm, path)

    file_model = await ensure_async(cm.new_untitled(path=path, ext=".txt"))

    # create a broken symlink
    symlink(cm, "target", "%s/%s" % (path, "bad symlink"))
    model = await ensure_async(cm.get(path))

    contents = {content["name"]: content for content in model["content"]}
    assert "untitled.txt" in contents
    assert contents["untitled.txt"] == file_model
    assert "bad symlink" in contents


@pytest.mark.skipif(sys.platform.startswith("win"), reason="Windows doesn't detect symlink loops")
async def test_recursive_symlink(jp_file_contents_manager_class, tmp_path):
    td = str(tmp_path)
    cm = jp_file_contents_manager_class(root_dir=td)
    path = "test recursive symlink"
    _make_dir(cm, path)

    file_model = await ensure_async(cm.new_untitled(path=path, ext=".txt"))

    # create recursive symlink
    symlink(cm, "%s/%s" % (path, "recursive"), "%s/%s" % (path, "recursive"))
    model = await ensure_async(cm.get(path))

    contents = {content["name"]: content for content in model["content"]}
    assert "untitled.txt" in contents
    assert contents["untitled.txt"] == file_model
    # recursive symlinks should not be shown in the contents manager
    assert "recursive" not in contents


async def test_good_symlink(jp_file_contents_manager_class, tmp_path):
    td = str(tmp_path)
    cm = jp_file_contents_manager_class(root_dir=td)
    parent = "test good symlink"
    name = "good symlink"
    path = "{0}/{1}".format(parent, name)
    _make_dir(cm, parent)

    file_model = await ensure_async(cm.new(path=parent + "/zfoo.txt"))

    # create a good symlink
    symlink(cm, file_model["path"], path)
    symlink_model = await ensure_async(cm.get(path, content=False))
    dir_model = await ensure_async(cm.get(parent))
    assert sorted(dir_model["content"], key=lambda x: x["name"]) == [symlink_model, file_model]


@pytest.mark.skipif(sys.platform.startswith("win"), reason="Can't test permissions on Windows")
async def test_403(jp_file_contents_manager_class, tmp_path):
    if hasattr(os, "getuid"):
        if os.getuid() == 0:
            raise pytest.skip("Can't test permissions as root")

    td = str(tmp_path)
    cm = jp_file_contents_manager_class(root_dir=td)
    model = await ensure_async(cm.new_untitled(type="file"))
    os_path = cm._get_os_path(model["path"])

    os.chmod(os_path, 0o400)
    try:
        with cm.open(os_path, "w") as f:
            f.write(u"don't care")
    except HTTPError as e:
        assert e.status_code == 403


async def test_escape_root(jp_file_contents_manager_class, tmp_path):
    td = str(tmp_path)
    cm = jp_file_contents_manager_class(root_dir=td)
    # make foo, bar next to root
    with open(os.path.join(cm.root_dir, "..", "foo"), "w") as f:
        f.write("foo")
    with open(os.path.join(cm.root_dir, "..", "bar"), "w") as f:
        f.write("bar")

    with pytest.raises(HTTPError) as e:
        await ensure_async(cm.get(".."))
    expected_http_error(e, 404)

    with pytest.raises(HTTPError) as e:
        await ensure_async(cm.get("foo/../../../bar"))
    expected_http_error(e, 404)

    with pytest.raises(HTTPError) as e:
        await ensure_async(cm.delete("../foo"))
    expected_http_error(e, 404)

    with pytest.raises(HTTPError) as e:
        await ensure_async(cm.rename("../foo", "../bar"))
    expected_http_error(e, 404)

    with pytest.raises(HTTPError) as e:
        await ensure_async(
            cm.save(
                model={
                    "type": "file",
                    "content": u"",
                    "format": "text",
                },
                path="../foo",
            )
        )
    expected_http_error(e, 404)


async def test_new_untitled(jp_contents_manager):
    cm = jp_contents_manager
    # Test in root directory
    model = await ensure_async(cm.new_untitled(type="notebook"))
    assert isinstance(model, dict)
    assert "name" in model
    assert "path" in model
    assert "type" in model
    assert model["type"] == "notebook"
    assert model["name"] == "Untitled.ipynb"
    assert model["path"] == "Untitled.ipynb"

    # Test in sub-directory
    model = await ensure_async(cm.new_untitled(type="directory"))
    assert isinstance(model, dict)
    assert "name" in model
    assert "path" in model
    assert "type" in model
    assert model["type"] == "directory"
    assert model["name"] == "Untitled Folder"
    assert model["path"] == "Untitled Folder"
    sub_dir = model["path"]

    model = await ensure_async(cm.new_untitled(path=sub_dir))
    assert isinstance(model, dict)
    assert "name" in model
    assert "path" in model
    assert "type" in model
    assert model["type"] == "file"
    assert model["name"] == "untitled"
    assert model["path"] == "%s/untitled" % sub_dir

    # Test with a compound extension
    model = await ensure_async(cm.new_untitled(path=sub_dir, ext=".foo.bar"))
    assert model["name"] == "untitled.foo.bar"
    model = await ensure_async(cm.new_untitled(path=sub_dir, ext=".foo.bar"))
    assert model["name"] == "untitled1.foo.bar"


async def test_modified_date(jp_contents_manager):
    cm = jp_contents_manager
    # Create a new notebook.
    nb, name, path = await new_notebook(cm)
    model = await ensure_async(cm.get(path))

    # Add a cell and save.
    add_code_cell(model["content"])
    await ensure_async(cm.save(model, path))

    # Reload notebook and verify that last_modified incremented.
    saved = await ensure_async(cm.get(path))
    assert saved["last_modified"] >= model["last_modified"]

    # Move the notebook and verify that last_modified stayed the same.
    # (The frontend fires a warning if last_modified increases on the
    # renamed file.)
new_path = "renamed.ipynb" await ensure_async(cm.rename(path, new_path)) renamed = await ensure_async(cm.get(new_path)) assert renamed["last_modified"] >= saved["last_modified"] async def test_get(jp_contents_manager): cm = jp_contents_manager # Create a notebook model = await ensure_async(cm.new_untitled(type="notebook")) name = model["name"] path = model["path"] # Check that we 'get' on the notebook we just created model2 = await ensure_async(cm.get(path)) assert isinstance(model2, dict) assert "name" in model2 assert "path" in model2 assert model["name"] == name assert model["path"] == path nb_as_file = await ensure_async(cm.get(path, content=True, type="file")) assert nb_as_file["path"] == path assert nb_as_file["type"] == "file" assert nb_as_file["format"] == "text" assert not isinstance(nb_as_file["content"], dict) nb_as_bin_file = await ensure_async(cm.get(path, content=True, type="file", format="base64")) assert nb_as_bin_file["format"] == "base64" # Test in sub-directory sub_dir = "/foo/" _make_dir(cm, "foo") await ensure_async(cm.new_untitled(path=sub_dir, ext=".ipynb")) model2 = await ensure_async(cm.get(sub_dir + name)) assert isinstance(model2, dict) assert "name" in model2 assert "path" in model2 assert "content" in model2 assert model2["name"] == "Untitled.ipynb" assert model2["path"] == "{0}/{1}".format(sub_dir.strip("/"), name) # Test with a regular file. file_model_path = (await ensure_async(cm.new_untitled(path=sub_dir, ext=".txt")))["path"] file_model = await ensure_async(cm.get(file_model_path)) expected_model = { "content": u"", "format": u"text", "mimetype": u"text/plain", "name": u"untitled.txt", "path": u"foo/untitled.txt", "type": u"file", "writable": True, } # Assert expected model is in file_model for key, value in expected_model.items(): assert file_model[key] == value assert "created" in file_model assert "last_modified" in file_model # Create a sub-sub directory to test getting directory contents with a # subdir. _make_dir(cm, "foo/bar") dirmodel = await ensure_async(cm.get("foo")) assert dirmodel["type"] == "directory" assert isinstance(dirmodel["content"], list) assert len(dirmodel["content"]) == 3 assert dirmodel["path"] == "foo" assert dirmodel["name"] == "foo" # Directory contents should match the contents of each individual entry # when requested with content=False. model2_no_content = await ensure_async(cm.get(sub_dir + name, content=False)) file_model_no_content = await ensure_async(cm.get(u"foo/untitled.txt", content=False)) sub_sub_dir_no_content = await ensure_async(cm.get("foo/bar", content=False)) assert sub_sub_dir_no_content["path"] == "foo/bar" assert sub_sub_dir_no_content["name"] == "bar" for entry in dirmodel["content"]: # Order isn't guaranteed by the spec, so this is a hacky way of # verifying that all entries are matched. if entry["path"] == sub_sub_dir_no_content["path"]: assert entry == sub_sub_dir_no_content elif entry["path"] == model2_no_content["path"]: assert entry == model2_no_content elif entry["path"] == file_model_no_content["path"]: assert entry == file_model_no_content else: assert False, "Unexpected directory entry: %s" % entry() with pytest.raises(HTTPError): await ensure_async(cm.get("foo", type="file")) async def test_update(jp_contents_manager): cm = jp_contents_manager # Create a notebook. 
    model = await ensure_async(cm.new_untitled(type="notebook"))
    name = model["name"]
    path = model["path"]

    # Change the name in the model for rename
    model["path"] = "test.ipynb"
    model = await ensure_async(cm.update(model, path))
    assert isinstance(model, dict)
    assert "name" in model
    assert "path" in model
    assert model["name"] == "test.ipynb"

    # Make sure the old name is gone
    with pytest.raises(HTTPError):
        await ensure_async(cm.get(path))

    # Test in sub-directory
    # Create a directory and notebook in that directory
    sub_dir = "/foo/"
    _make_dir(cm, "foo")
    model = await ensure_async(cm.new_untitled(path=sub_dir, type="notebook"))
    path = model["path"]

    # Change the name in the model for rename
    d = path.rsplit("/", 1)[0]
    new_path = model["path"] = d + "/test_in_sub.ipynb"
    model = await ensure_async(cm.update(model, path))
    assert isinstance(model, dict)
    assert "name" in model
    assert "path" in model
    assert model["name"] == "test_in_sub.ipynb"
    assert model["path"] == new_path

    # Make sure the old name is gone
    with pytest.raises(HTTPError):
        await ensure_async(cm.get(path))


async def test_save(jp_contents_manager):
    cm = jp_contents_manager
    # Create a notebook
    model = await ensure_async(cm.new_untitled(type="notebook"))
    name = model["name"]
    path = model["path"]

    # Get the model with 'content'
    full_model = await ensure_async(cm.get(path))

    # Save the notebook
    model = await ensure_async(cm.save(full_model, path))
    assert isinstance(model, dict)
    assert "name" in model
    assert "path" in model
    assert model["name"] == name
    assert model["path"] == path

    # Test in sub-directory
    # Create a directory and notebook in that directory
    sub_dir = "/foo/"
    _make_dir(cm, "foo")
    model = await ensure_async(cm.new_untitled(path=sub_dir, type="notebook"))
    path = model["path"]
    model = await ensure_async(cm.get(path))

    # Change the name in the model for rename
    model = await ensure_async(cm.save(model, path))
    assert isinstance(model, dict)
    assert "name" in model
    assert "path" in model
    assert model["name"] == "Untitled.ipynb"
    assert model["path"] == "foo/Untitled.ipynb"


async def test_delete(jp_contents_manager):
    cm = jp_contents_manager
    # Create a notebook
    nb, name, path = await new_notebook(cm)

    # Delete the notebook
    await ensure_async(cm.delete(path))

    # Check that deleting a non-existent path raises an error.
    with pytest.raises(HTTPError):
        await ensure_async(cm.delete(path))

    # Check that a 'get' on the deleted notebook raises an error
    with pytest.raises(HTTPError):
        await ensure_async(cm.get(path))


@pytest.mark.parametrize(
    "delete_to_trash, always_delete, error",
    (
        [True, True, False],
        # on linux test folder may not be on home folder drive
        # => if this is the case, _check_trash will be False
        [True, False, None],
        [False, True, False],
        [False, False, True],
    ),
)
async def test_delete_non_empty_folder(delete_to_trash, always_delete, error, jp_contents_manager):
    cm = jp_contents_manager
    cm.delete_to_trash = delete_to_trash
    cm.always_delete_dir = always_delete

    dir = "to_delete"

    await make_populated_dir(cm, dir)
    await check_populated_dir_files(cm, dir)

    if error is None:
        error = False
        if sys.platform == "win32":
            error = True
        elif sys.platform == "linux":
            file_dev = os.stat(cm.root_dir).st_dev
            home_dev = os.stat(os.path.expanduser("~")).st_dev
            error = file_dev != home_dev

    if error:
        with pytest.raises(
            HTTPError,
            match=r"HTTP 400: Bad Request \(Directory .*?to_delete not empty\)",
        ):
            await ensure_async(cm.delete_file(dir))
    else:
        await ensure_async(cm.delete_file(dir))
        assert not await ensure_async(cm.dir_exists(dir))


async def test_rename(jp_contents_manager):
    cm = jp_contents_manager
    # Create a new notebook
    nb, name, path = await new_notebook(cm)

    # Rename the notebook
    await ensure_async(cm.rename(path, "changed_path"))

    # Attempting to get the notebook under the old name raises an error
    with pytest.raises(HTTPError):
        await ensure_async(cm.get(path))
    # Fetching the notebook under the new name is successful
    assert isinstance(await ensure_async(cm.get("changed_path")), dict)

    # Ported tests on nested directory renaming from pgcontents
    all_dirs = ["foo", "bar", "foo/bar", "foo/bar/foo", "foo/bar/foo/bar"]
    unchanged_dirs = all_dirs[:2]
    changed_dirs = all_dirs[2:]

    for _dir in all_dirs:
        await make_populated_dir(cm, _dir)
        await check_populated_dir_files(cm, _dir)

    # Renaming to an existing directory should fail
    for src, dest in combinations(all_dirs, 2):
        with pytest.raises(HTTPError) as e:
            await ensure_async(cm.rename(src, dest))
        assert expected_http_error(e, 409)

    # Creating a notebook in a non-existent directory should fail
    with pytest.raises(HTTPError) as e:
        await ensure_async(cm.new_untitled("foo/bar_diff", ext=".ipynb"))
    assert expected_http_error(e, 404)

    await ensure_async(cm.rename("foo/bar", "foo/bar_diff"))

    # Assert that unchanged directories remain so
    for unchanged in unchanged_dirs:
        await check_populated_dir_files(cm, unchanged)

    # Assert changed directories can no longer be accessed under old names
    for changed_dirname in changed_dirs:
        with pytest.raises(HTTPError) as e:
            await ensure_async(cm.get(changed_dirname))
        assert expected_http_error(e, 404)

        new_dirname = changed_dirname.replace("foo/bar", "foo/bar_diff", 1)
        await check_populated_dir_files(cm, new_dirname)

    # Creating a notebook in the renamed directory should work
    await ensure_async(cm.new_untitled("foo/bar_diff", ext=".ipynb"))


async def test_delete_root(jp_contents_manager):
    cm = jp_contents_manager
    with pytest.raises(HTTPError) as e:
        await ensure_async(cm.delete(""))
    assert expected_http_error(e, 400)


async def test_copy(jp_contents_manager):
    cm = jp_contents_manager
    parent = u"å b"
    name = u"nb √.ipynb"
    path = u"{0}/{1}".format(parent, name)
    _make_dir(cm, parent)

    orig = await ensure_async(cm.new(path=path))
    # copy with unspecified name
    copy = await ensure_async(cm.copy(path))
    assert copy["name"] == orig["name"].replace(".ipynb", "-Copy1.ipynb")

    # copy with specified name
    copy2 = await ensure_async(cm.copy(path, u"å b/copy 2.ipynb"))
    assert copy2["name"] == u"copy 2.ipynb"
    assert copy2["path"] == u"å b/copy 2.ipynb"
    # copy with specified path
    copy2 = await ensure_async(cm.copy(path, u"/"))
    assert copy2["name"] == name
    assert copy2["path"] == name


async def test_mark_trusted_cells(jp_contents_manager):
    cm = jp_contents_manager
    nb, name, path = await new_notebook(cm)

    cm.mark_trusted_cells(nb, path)
    for cell in nb.cells:
        if cell.cell_type == "code":
            assert not cell.metadata.trusted

    await ensure_async(cm.trust_notebook(path))
    nb = (await ensure_async(cm.get(path)))["content"]
    for cell in nb.cells:
        if cell.cell_type == "code":
            assert cell.metadata.trusted


async def test_check_and_sign(jp_contents_manager):
    cm = jp_contents_manager
    nb, name, path = await new_notebook(cm)

    cm.mark_trusted_cells(nb, path)
    cm.check_and_sign(nb, path)
    assert not cm.notary.check_signature(nb)

    await ensure_async(cm.trust_notebook(path))
    nb = (await ensure_async(cm.get(path)))["content"]
    cm.mark_trusted_cells(nb, path)
    cm.check_and_sign(nb, path)
    assert cm.notary.check_signature(nb)

jupyter_server-1.13.1/jupyter_server/tests/services/kernels/000077500000000000000000000000001415445537200244175ustar00rootroot00000000000000
jupyter_server-1.13.1/jupyter_server/tests/services/kernels/__init__.py000066400000000000000000000000001415445537200265160ustar00rootroot00000000000000
jupyter_server-1.13.1/jupyter_server/tests/services/kernels/test_api.py000066400000000000000000000172031415445537200266040ustar00rootroot00000000000000
import json
import time

import pytest
import tornado
from jupyter_client.kernelspec import NATIVE_KERNEL_NAME
from tornado.httpclient import HTTPClientError

from ...utils import expected_http_error
from jupyter_server.services.kernels.kernelmanager import AsyncMappingKernelManager
from jupyter_server.utils import url_path_join


class TestMappingKernelManager(AsyncMappingKernelManager):
    """A no-op subclass to use in a fixture"""


@pytest.fixture(
    params=["MappingKernelManager", "AsyncMappingKernelManager", "TestMappingKernelManager"]
)
def jp_argv(request):
    if request.param == "TestMappingKernelManager":
        extra = []
        if hasattr(AsyncMappingKernelManager, "use_pending_kernels"):
            extra = ["--AsyncMappingKernelManager.use_pending_kernels=True"]
        return [
            "--ServerApp.kernel_manager_class=jupyter_server.tests.services.kernels.test_api."
            + request.param
        ] + extra
    return [
        "--ServerApp.kernel_manager_class=jupyter_server.services.kernels.kernelmanager."
        + request.param
    ]


async def test_no_kernels(jp_fetch):
    r = await jp_fetch("api", "kernels", method="GET")
    kernels = json.loads(r.body.decode())
    assert kernels == []


async def test_default_kernels(jp_fetch, jp_base_url, jp_cleanup_subprocesses):
    r = await jp_fetch("api", "kernels", method="POST", allow_nonstandard_methods=True)
    kernel = json.loads(r.body.decode())
    assert r.headers["location"] == url_path_join(jp_base_url, "/api/kernels/", kernel["id"])
    assert r.code == 201
    assert isinstance(kernel, dict)

    report_uri = url_path_join(jp_base_url, "/api/security/csp-report")
    expected_csp = "; ".join(
        ["frame-ancestors 'self'", "report-uri " + report_uri, "default-src 'none'"]
    )
    assert r.headers["Content-Security-Policy"] == expected_csp
    await jp_cleanup_subprocesses()


async def test_main_kernel_handler(jp_fetch, jp_base_url, jp_cleanup_subprocesses, jp_serverapp):
    # Start the first kernel
    r = await jp_fetch(
        "api", "kernels", method="POST", body=json.dumps({"name": NATIVE_KERNEL_NAME})
    )
    kernel1 = json.loads(r.body.decode())
    assert r.headers["location"] == url_path_join(jp_base_url, "/api/kernels/", kernel1["id"])
    assert r.code == 201
    assert isinstance(kernel1, dict)

    report_uri = url_path_join(jp_base_url, "/api/security/csp-report")
    expected_csp = "; ".join(
        ["frame-ancestors 'self'", "report-uri " + report_uri, "default-src 'none'"]
    )
    assert r.headers["Content-Security-Policy"] == expected_csp

    # Check that the kernel is found in the kernel list
    r = await jp_fetch("api", "kernels", method="GET")
    kernel_list = json.loads(r.body.decode())
    assert r.code == 200
    assert isinstance(kernel_list, list)
    assert kernel_list[0]["id"] == kernel1["id"]
    assert kernel_list[0]["name"] == kernel1["name"]

    # Start a second kernel
    r = await jp_fetch(
        "api", "kernels", method="POST", body=json.dumps({"name": NATIVE_KERNEL_NAME})
    )
    kernel2 = json.loads(r.body.decode())
    assert isinstance(kernel2, dict)

    # Get kernel list again
    r = await jp_fetch("api", "kernels", method="GET")
    kernel_list = json.loads(r.body.decode())
    assert r.code == 200
    assert isinstance(kernel_list, list)
    assert len(kernel_list) == 2

    # Interrupt a kernel
    r = await jp_fetch(
        "api", "kernels", kernel2["id"], "interrupt", method="POST", allow_nonstandard_methods=True
    )
    assert r.code == 204

    # Restart a kernel
    kernel = jp_serverapp.kernel_manager.get_kernel(kernel2["id"])
    if hasattr(kernel, "ready"):
        await kernel.ready
    r = await jp_fetch(
        "api", "kernels", kernel2["id"], "restart", method="POST", allow_nonstandard_methods=True
    )
    restarted_kernel = json.loads(r.body.decode())
    assert restarted_kernel["id"] == kernel2["id"]
    assert restarted_kernel["name"] == kernel2["name"]

    # Start a kernel with a path
    r = await jp_fetch(
        "api",
        "kernels",
        method="POST",
        body=json.dumps({"name": NATIVE_KERNEL_NAME, "path": "/foo"}),
    )
    kernel3 = json.loads(r.body.decode())
    assert isinstance(kernel3, dict)
    await jp_cleanup_subprocesses()


async def test_kernel_handler(jp_fetch, jp_cleanup_subprocesses):
    # Create a kernel
    r = await jp_fetch(
        "api", "kernels", method="POST", body=json.dumps({"name": NATIVE_KERNEL_NAME})
    )
    kernel_id = json.loads(r.body.decode())["id"]
    r = await jp_fetch("api", "kernels", kernel_id, method="GET")
    kernel = json.loads(r.body.decode())
    assert r.code == 200
    assert isinstance(kernel, dict)
    assert "id" in kernel
    assert kernel["id"] == kernel_id

    # Requests a bad kernel id.
bad_id = "111-111-111-111-111" with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch("api", "kernels", bad_id, method="GET") assert expected_http_error(e, 404) # Delete kernel with id. r = await jp_fetch( "api", "kernels", kernel_id, method="DELETE", ) assert r.code == 204 # Get list of kernels r = await jp_fetch("api", "kernels", method="GET") kernel_list = json.loads(r.body.decode()) assert kernel_list == [] # Request to delete a non-existent kernel id bad_id = "111-111-111-111-111" with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch("api", "kernels", bad_id, method="DELETE") assert expected_http_error(e, 404, "Kernel does not exist: " + bad_id) await jp_cleanup_subprocesses() async def test_kernel_handler_startup_error( jp_fetch, jp_cleanup_subprocesses, jp_serverapp, jp_kernelspecs ): if getattr(jp_serverapp.kernel_manager, "use_pending_kernels", False): return # Create a kernel with pytest.raises(HTTPClientError): await jp_fetch("api", "kernels", method="POST", body=json.dumps({"name": "bad"})) async def test_kernel_handler_startup_error_pending( jp_fetch, jp_ws_fetch, jp_cleanup_subprocesses, jp_serverapp, jp_kernelspecs ): if not getattr(jp_serverapp.kernel_manager, "use_pending_kernels", False): return jp_serverapp.kernel_manager.use_pending_kernels = True # Create a kernel r = await jp_fetch("api", "kernels", method="POST", body=json.dumps({"name": "bad"})) kid = json.loads(r.body.decode())["id"] with pytest.raises(HTTPClientError): await jp_ws_fetch("api", "kernels", kid, "channels") async def test_connection( jp_fetch, jp_ws_fetch, jp_http_port, jp_auth_header, jp_cleanup_subprocesses ): # Create kernel r = await jp_fetch( "api", "kernels", method="POST", body=json.dumps({"name": NATIVE_KERNEL_NAME}) ) kid = json.loads(r.body.decode())["id"] # Get kernel info r = await jp_fetch("api", "kernels", kid, method="GET") model = json.loads(r.body.decode()) assert model["connections"] == 0 # Open a websocket connection. ws = await jp_ws_fetch("api", "kernels", kid, "channels") # Test that it was opened. 
    r = await jp_fetch("api", "kernels", kid, method="GET")
    model = json.loads(r.body.decode())
    assert model["connections"] == 1

    # Close websocket
    ws.close()
    # give it some time to close on the other side:
    for i in range(10):
        r = await jp_fetch("api", "kernels", kid, method="GET")
        model = json.loads(r.body.decode())
        if model["connections"] > 0:
            time.sleep(0.1)
        else:
            break

    r = await jp_fetch("api", "kernels", kid, method="GET")
    model = json.loads(r.body.decode())
    assert model["connections"] == 0
    await jp_cleanup_subprocesses()

jupyter_server-1.13.1/jupyter_server/tests/services/kernels/test_config.py000066400000000000000000000013431415445537200272760ustar00rootroot00000000000000
import pytest
from traitlets.config import Config

from jupyter_server.services.kernels.kernelmanager import AsyncMappingKernelManager


@pytest.fixture
def jp_server_config():
    return Config(
        {"ServerApp": {"MappingKernelManager": {"allowed_message_types": ["kernel_info_request"]}}}
    )


def test_config(jp_serverapp):
    assert jp_serverapp.kernel_manager.allowed_message_types == ["kernel_info_request"]


async def test_async_kernel_manager(jp_configurable_serverapp):
    argv = [
        "--ServerApp.kernel_manager_class=jupyter_server.services.kernels.kernelmanager.AsyncMappingKernelManager"
    ]
    app = jp_configurable_serverapp(argv=argv)
    assert isinstance(app.kernel_manager, AsyncMappingKernelManager)

jupyter_server-1.13.1/jupyter_server/tests/services/kernels/test_cull.py000066400000000000000000000055651415445537200270020ustar00rootroot00000000000000
import asyncio
import json
import platform

import pytest
from tornado.httpclient import HTTPClientError
from traitlets.config import Config


@pytest.fixture(params=["MappingKernelManager", "AsyncMappingKernelManager"])
def jp_argv(request):
    return [
        "--ServerApp.kernel_manager_class=jupyter_server.services.kernels.kernelmanager."
        + request.param
    ]


CULL_TIMEOUT = 30 if platform.python_implementation() == "PyPy" else 5
CULL_INTERVAL = 1


@pytest.fixture
def jp_server_config():
    return Config(
        {
            "ServerApp": {
                "MappingKernelManager": {
                    "cull_idle_timeout": CULL_TIMEOUT,
                    "cull_interval": CULL_INTERVAL,
                    "cull_connected": False,
                }
            }
        }
    )


async def test_cull_idle(jp_fetch, jp_ws_fetch, jp_cleanup_subprocesses):
    r = await jp_fetch("api", "kernels", method="POST", allow_nonstandard_methods=True)
    kernel = json.loads(r.body.decode())
    kid = kernel["id"]

    # Open a websocket connection.
    ws = await jp_ws_fetch("api", "kernels", kid, "channels")

    r = await jp_fetch("api", "kernels", kid, method="GET")
    model = json.loads(r.body.decode())
    assert model["connections"] == 1
    culled = await get_cull_status(kid, jp_fetch)  # connected, should not be culled
    assert not culled
    ws.close()
    culled = await get_cull_status(kid, jp_fetch)  # not connected, should be culled
    assert culled
    await jp_cleanup_subprocesses()


async def test_cull_dead(
    jp_fetch, jp_ws_fetch, jp_serverapp, jp_cleanup_subprocesses, jp_kernelspecs
):
    if not hasattr(jp_serverapp.kernel_manager, "use_pending_kernels"):
        return

    jp_serverapp.kernel_manager.use_pending_kernels = True
    jp_serverapp.kernel_manager.default_kernel_name = "bad"
    r = await jp_fetch("api", "kernels", method="POST", allow_nonstandard_methods=True)
    kernel = json.loads(r.body.decode())
    kid = kernel["id"]

    # Open a websocket connection.
    with pytest.raises(HTTPClientError):
        await jp_ws_fetch("api", "kernels", kid, "channels")

    r = await jp_fetch("api", "kernels", kid, method="GET")
    model = json.loads(r.body.decode())
    assert model["connections"] == 0
    culled = await get_cull_status(kid, jp_fetch)  # dead kernel, should be culled
    assert culled
    await jp_cleanup_subprocesses()


async def get_cull_status(kid, jp_fetch):
    frequency = 0.5
    culled = False
    for _ in range(
        int((CULL_TIMEOUT + CULL_INTERVAL) / frequency)
    ):  # Timeout + Interval will ensure cull
        try:
            r = await jp_fetch("api", "kernels", kid, method="GET")
            json.loads(r.body.decode())
        except HTTPClientError as e:
            assert e.code == 404
            culled = True
            break
        else:
            await asyncio.sleep(frequency)
    return culled

jupyter_server-1.13.1/jupyter_server/tests/services/kernelspecs/000077500000000000000000000000001415445537200252725ustar00rootroot00000000000000
jupyter_server-1.13.1/jupyter_server/tests/services/kernelspecs/__init__.py000066400000000000000000000000001415445537200273710ustar00rootroot00000000000000
jupyter_server-1.13.1/jupyter_server/tests/services/kernelspecs/test_api.py000066400000000000000000000056371415445537200274650ustar00rootroot00000000000000
import json

import pytest
import tornado
from jupyter_client.kernelspec import NATIVE_KERNEL_NAME

from ...utils import expected_http_error
from ...utils import some_resource


async def test_list_kernelspecs_bad(jp_fetch, jp_kernelspecs, jp_data_dir):
    bad_kernel_dir = jp_data_dir.joinpath(jp_data_dir, "kernels", "bad2")
    bad_kernel_dir.mkdir(parents=True)
    bad_kernel_json = bad_kernel_dir.joinpath("kernel.json")
    bad_kernel_json.write_text("garbage")

    r = await jp_fetch("api", "kernelspecs", method="GET")
    model = json.loads(r.body.decode())
    assert isinstance(model, dict)
    assert model["default"] == NATIVE_KERNEL_NAME
    specs = model["kernelspecs"]
    assert isinstance(specs, dict)
    assert len(specs) > 2


async def test_list_kernelspecs(jp_fetch, jp_kernelspecs):
    r = await jp_fetch("api", "kernelspecs", method="GET")
    model = json.loads(r.body.decode())
    assert isinstance(model, dict)
    assert model["default"] == NATIVE_KERNEL_NAME
    specs = model["kernelspecs"]
    assert isinstance(specs, dict)
    assert len(specs) > 2

    def is_sample_kernelspec(s):
        return s["name"] == "sample" and s["spec"]["display_name"] == "Test kernel"

    def is_default_kernelspec(s):
        return s["name"] == NATIVE_KERNEL_NAME and s["spec"]["display_name"].startswith("Python")

    assert any(is_sample_kernelspec(s) for s in specs.values()), specs
    assert any(is_default_kernelspec(s) for s in specs.values()), specs


async def test_get_kernelspecs(jp_fetch, jp_kernelspecs):
    r = await jp_fetch("api", "kernelspecs", "Sample", method="GET")
    model = json.loads(r.body.decode())
    assert model["name"].lower() == "sample"
    assert isinstance(model["spec"], dict)
    assert model["spec"]["display_name"] == "Test kernel"
    assert isinstance(model["resources"], dict)


async def test_get_kernelspec_spaces(jp_fetch, jp_kernelspecs):
    r = await jp_fetch("api", "kernelspecs", "sample%202", method="GET")
    model = json.loads(r.body.decode())
    assert model["name"].lower() == "sample 2"


async def test_get_nonexistant_kernelspec(jp_fetch, jp_kernelspecs):
    with pytest.raises(tornado.httpclient.HTTPClientError) as e:
        await jp_fetch("api", "kernelspecs", "nonexistant", method="GET")
    assert expected_http_error(e, 404)


async def test_get_kernel_resource_file(jp_fetch, jp_kernelspecs):
    r = await jp_fetch("kernelspecs", "sAmple", "resource.txt", method="GET")
    res = r.body.decode("utf-8")
    assert res == some_resource


async def test_get_nonexistant_resource(jp_fetch, jp_kernelspecs):
    with pytest.raises(tornado.httpclient.HTTPClientError) as e:
        await jp_fetch("kernelspecs", "nonexistant", "resource.txt", method="GET")
    assert expected_http_error(e, 404)

    with pytest.raises(tornado.httpclient.HTTPClientError) as e:
        await jp_fetch("kernelspecs", "sample", "nonexistant.txt", method="GET")
    assert expected_http_error(e, 404)

jupyter_server-1.13.1/jupyter_server/tests/services/nbconvert/000077500000000000000000000000001415445537200247545ustar00rootroot00000000000000
jupyter_server-1.13.1/jupyter_server/tests/services/nbconvert/__init__.py000066400000000000000000000000001415445537200270530ustar00rootroot00000000000000
jupyter_server-1.13.1/jupyter_server/tests/services/nbconvert/test_api.py000066400000000000000000000010311415445537200271310ustar00rootroot00000000000000
import json


async def test_list_formats(jp_fetch):
    r = await jp_fetch("api", "nbconvert", method="GET")
    formats = json.loads(r.body.decode())

    # Verify the type of the response.
    assert isinstance(formats, dict)

    # Verify that all returned formats have an
    # output mimetype defined.
    required_keys_present = []
    for name, data in formats.items():
        required_keys_present.append("output_mimetype" in data)
    assert all(required_keys_present), "All returned formats must have an `output_mimetype` key."

jupyter_server-1.13.1/jupyter_server/tests/services/sessions/000077500000000000000000000000001415445537200246225ustar00rootroot00000000000000
jupyter_server-1.13.1/jupyter_server/tests/services/sessions/__init__.py000066400000000000000000000000001415445537200267210ustar00rootroot00000000000000
jupyter_server-1.13.1/jupyter_server/tests/services/sessions/test_api.py000066400000000000000000000420421415445537200270060ustar00rootroot00000000000000
import json
import os
import shutil
import time

import pytest
import tornado
from jupyter_client.ioloop import AsyncIOLoopKernelManager
from nbformat import writes
from nbformat.v4 import new_notebook
from tornado.httpclient import HTTPClientError
from traitlets import default

from ...utils import expected_http_error
from jupyter_server.services.kernels.kernelmanager import AsyncMappingKernelManager
from jupyter_server.utils import url_path_join

j = lambda r: json.loads(r.body.decode())


class NewPortsKernelManager(AsyncIOLoopKernelManager):
    @default("cache_ports")
    def _default_cache_ports(self) -> bool:
        return False

    async def restart_kernel(self, now: bool = False, newports: bool = True, **kw) -> None:
        self.log.debug(f"DEBUG**** calling super().restart_kernel with newports={newports}")
        return await super().restart_kernel(now=now, newports=newports, **kw)


class NewPortsMappingKernelManager(AsyncMappingKernelManager):
    @default("kernel_manager_class")
    def _default_kernel_manager_class(self):
        self.log.debug("NewPortsMappingKernelManager in _default_kernel_manager_class!")
        return "jupyter_server.tests.services.sessions.test_api.NewPortsKernelManager"


@pytest.fixture(
    params=["MappingKernelManager", "AsyncMappingKernelManager", "NewPortsMappingKernelManager"]
)
def jp_argv(request):
    if request.param == "NewPortsMappingKernelManager":
        extra = []
        if hasattr(AsyncMappingKernelManager, "use_pending_kernels"):
            extra = ["--AsyncMappingKernelManager.use_pending_kernels=True"]
        return [
            "--ServerApp.kernel_manager_class=jupyter_server.tests.services.sessions.test_api."
            + request.param
        ] + extra
    return [
        "--ServerApp.kernel_manager_class=jupyter_server.services.kernels.kernelmanager."
        + request.param
    ]


class SessionClient:
    def __init__(self, fetch_callable):
        self.jp_fetch = fetch_callable

    async def _req(self, *args, method, body=None):
        if body is not None:
            body = json.dumps(body)

        r = await self.jp_fetch(
            "api", "sessions", *args, method=method, body=body, allow_nonstandard_methods=True
        )
        return r

    async def list(self):
        return await self._req(method="GET")

    async def get(self, id):
        return await self._req(id, method="GET")

    async def create(self, path, type="notebook", kernel_name=None, kernel_id=None):
        body = {"path": path, "type": type, "kernel": {"name": kernel_name, "id": kernel_id}}
        return await self._req(method="POST", body=body)

    def create_deprecated(self, path):
        body = {"notebook": {"path": path}, "kernel": {"name": "python", "id": "foo"}}
        return self._req(method="POST", body=body)

    def modify_path(self, id, path):
        body = {"path": path}
        return self._req(id, method="PATCH", body=body)

    def modify_path_deprecated(self, id, path):
        body = {"notebook": {"path": path}}
        return self._req(id, method="PATCH", body=body)

    def modify_type(self, id, type):
        body = {"type": type}
        return self._req(id, method="PATCH", body=body)

    def modify_kernel_name(self, id, kernel_name):
        body = {"kernel": {"name": kernel_name}}
        return self._req(id, method="PATCH", body=body)

    def modify_kernel_id(self, id, kernel_id):
        # Also send a dummy name to show that id takes precedence.
        body = {"kernel": {"id": kernel_id, "name": "foo"}}
        return self._req(id, method="PATCH", body=body)

    async def delete(self, id):
        return await self._req(id, method="DELETE")

    async def cleanup(self):
        resp = await self.list()
        sessions = j(resp)
        for session in sessions:
            await self.delete(session["id"])
        time.sleep(0.1)


@pytest.fixture
def session_client(jp_root_dir, jp_fetch):
    subdir = jp_root_dir.joinpath("foo")
    subdir.mkdir()

    # Write a notebook to subdir.
    nb = new_notebook()
    nb_str = writes(nb, version=4)
    nbpath = subdir.joinpath("nb1.ipynb")
    nbpath.write_text(nb_str, encoding="utf-8")

    # Yield a session client
    client = SessionClient(jp_fetch)
    yield client

    # Remove subdir
    shutil.rmtree(str(subdir), ignore_errors=True)


def assert_kernel_equality(actual, expected):
    """Compares kernel models after taking into account that execution_states
    may differ from 'starting' to 'idle'.  The 'actual' argument is the
    current state (which may have an 'idle' status) while the 'expected'
    argument is the previous state (which may have a 'starting' status).
    """
    actual.pop("execution_state", None)
    actual.pop("last_activity", None)
    expected.pop("execution_state", None)
    expected.pop("last_activity", None)
    assert actual == expected


def assert_session_equality(actual, expected):
    """Compares session models.  `actual` is the most current session,
    while `expected` is the target of the comparison.  This order
    matters when comparing the kernel sub-models.
    """
    assert actual["id"] == expected["id"]
    assert actual["path"] == expected["path"]
    assert actual["type"] == expected["type"]
    assert_kernel_equality(actual["kernel"], expected["kernel"])


async def test_create(session_client, jp_base_url, jp_cleanup_subprocesses, jp_serverapp):
    # Make sure no sessions exist.
    resp = await session_client.list()
    sessions = j(resp)
    assert len(sessions) == 0

    # Create a session.
    resp = await session_client.create("foo/nb1.ipynb")
    assert resp.code == 201
    new_session = j(resp)
    assert "id" in new_session
    assert new_session["path"] == "foo/nb1.ipynb"
    assert new_session["type"] == "notebook"
    assert resp.headers["Location"] == url_path_join(
        jp_base_url, "/api/sessions/", new_session["id"]
    )

    # Make sure kernel is in expected state
    kid = new_session["kernel"]["id"]
    kernel = jp_serverapp.kernel_manager.get_kernel(kid)

    if hasattr(kernel, "ready") and os.name != "nt":
        km = jp_serverapp.kernel_manager
        if isinstance(km, AsyncMappingKernelManager):
            assert kernel.ready.done() == (not km.use_pending_kernels)
        else:
            assert kernel.ready.done()

    # Check that the new session appears in list.
    resp = await session_client.list()
    sessions = j(resp)
    assert len(sessions) == 1
    assert_session_equality(sessions[0], new_session)

    # Retrieve that session.
    sid = new_session["id"]
    resp = await session_client.get(sid)
    got = j(resp)
    assert_session_equality(got, new_session)

    # Need to find a better solution to this.
    await session_client.cleanup()
    await jp_cleanup_subprocesses()


async def test_create_bad(
    session_client, jp_base_url, jp_cleanup_subprocesses, jp_serverapp, jp_kernelspecs
):
    if getattr(jp_serverapp.kernel_manager, "use_pending_kernels", False):
        return

    # Make sure no sessions exist.
    jp_serverapp.kernel_manager.default_kernel_name = "bad"
    resp = await session_client.list()
    sessions = j(resp)
    assert len(sessions) == 0

    # Create a session.
    with pytest.raises(HTTPClientError):
        await session_client.create("foo/nb1.ipynb")

    # Need to find a better solution to this.
    await session_client.cleanup()
    await jp_cleanup_subprocesses()


async def test_create_bad_pending(
    session_client, jp_base_url, jp_ws_fetch, jp_cleanup_subprocesses, jp_serverapp, jp_kernelspecs
):
    if not getattr(jp_serverapp.kernel_manager, "use_pending_kernels", False):
        return

    # Make sure no sessions exist.
    jp_serverapp.kernel_manager.default_kernel_name = "bad"
    resp = await session_client.list()
    sessions = j(resp)
    assert len(sessions) == 0

    # Create a session.
    resp = await session_client.create("foo/nb1.ipynb")
    assert resp.code == 201

    # Open a websocket connection.
    kid = j(resp)["kernel"]["id"]
    with pytest.raises(HTTPClientError):
        await jp_ws_fetch("api", "kernels", kid, "channels")

    # Get the updated kernel state
    resp = await session_client.list()
    session = j(resp)[0]
    assert session["kernel"]["execution_state"] == "dead"
    if os.name != "nt":
        assert "non_existent_path" in session["kernel"]["reason"]

    # Need to find a better solution to this.
    await session_client.cleanup()
    await jp_cleanup_subprocesses()


async def test_create_file_session(session_client, jp_cleanup_subprocesses, jp_serverapp):
    resp = await session_client.create("foo/nb1.py", type="file")
    assert resp.code == 201
    newsession = j(resp)
    assert newsession["path"] == "foo/nb1.py"
    assert newsession["type"] == "file"
    await session_client.cleanup()
    await jp_cleanup_subprocesses()


async def test_create_console_session(session_client, jp_cleanup_subprocesses, jp_serverapp):
    resp = await session_client.create("foo/abc123", type="console")
    assert resp.code == 201
    newsession = j(resp)
    assert newsession["path"] == "foo/abc123"
    assert newsession["type"] == "console"
    # Need to find a better solution to this.
    await session_client.cleanup()
    await jp_cleanup_subprocesses()


async def test_create_deprecated(session_client, jp_cleanup_subprocesses, jp_serverapp):
    resp = await session_client.create_deprecated("foo/nb1.ipynb")
    assert resp.code == 201
    newsession = j(resp)
    assert newsession["path"] == "foo/nb1.ipynb"
    assert newsession["type"] == "notebook"
    assert newsession["notebook"]["path"] == "foo/nb1.ipynb"
    # Need to find a better solution to this.
    await session_client.cleanup()
    await jp_cleanup_subprocesses()


async def test_create_with_kernel_id(
    session_client, jp_fetch, jp_base_url, jp_cleanup_subprocesses, jp_serverapp
):
    # create a new kernel
    resp = await jp_fetch("api/kernels", method="POST", allow_nonstandard_methods=True)
    kernel = j(resp)

    resp = await session_client.create("foo/nb1.ipynb", kernel_id=kernel["id"])
    assert resp.code == 201
    new_session = j(resp)
    assert "id" in new_session
    assert new_session["path"] == "foo/nb1.ipynb"
    assert new_session["kernel"]["id"] == kernel["id"]
    assert resp.headers["Location"] == url_path_join(
        jp_base_url, "/api/sessions/{0}".format(new_session["id"])
    )

    resp = await session_client.list()
    sessions = j(resp)
    assert len(sessions) == 1
    assert_session_equality(sessions[0], new_session)

    # Retrieve it
    sid = new_session["id"]
    resp = await session_client.get(sid)
    got = j(resp)
    assert_session_equality(got, new_session)

    # Need to find a better solution to this.
    await session_client.cleanup()
    await jp_cleanup_subprocesses()


async def test_create_with_bad_kernel_id(session_client, jp_cleanup_subprocesses, jp_serverapp):
    resp = await session_client.create("foo/nb1.py", type="file")
    assert resp.code == 201
    newsession = j(resp)
    # TODO
    assert newsession["path"] == "foo/nb1.py"
    assert newsession["type"] == "file"
    await session_client.cleanup()
    await jp_cleanup_subprocesses()


async def test_delete(session_client, jp_cleanup_subprocesses, jp_serverapp):
    resp = await session_client.create("foo/nb1.ipynb")
    newsession = j(resp)
    sid = newsession["id"]

    resp = await session_client.delete(sid)
    assert resp.code == 204

    resp = await session_client.list()
    sessions = j(resp)
    assert sessions == []

    with pytest.raises(tornado.httpclient.HTTPClientError) as e:
        await session_client.get(sid)
    assert expected_http_error(e, 404)

    # Need to find a better solution to this.
    await session_client.cleanup()
    await jp_cleanup_subprocesses()


async def test_modify_path(session_client, jp_cleanup_subprocesses, jp_serverapp):
    resp = await session_client.create("foo/nb1.ipynb")
    newsession = j(resp)
    sid = newsession["id"]

    resp = await session_client.modify_path(sid, "nb2.ipynb")
    changed = j(resp)
    assert changed["id"] == sid
    assert changed["path"] == "nb2.ipynb"

    # Need to find a better solution to this.
    await session_client.cleanup()
    await jp_cleanup_subprocesses()


async def test_modify_path_deprecated(session_client, jp_cleanup_subprocesses, jp_serverapp):
    resp = await session_client.create("foo/nb1.ipynb")
    newsession = j(resp)
    sid = newsession["id"]

    resp = await session_client.modify_path_deprecated(sid, "nb2.ipynb")
    changed = j(resp)
    assert changed["id"] == sid
    assert changed["notebook"]["path"] == "nb2.ipynb"

    # Need to find a better solution to this.
    await session_client.cleanup()
    await jp_cleanup_subprocesses()


async def test_modify_type(session_client, jp_cleanup_subprocesses, jp_serverapp):
    resp = await session_client.create("foo/nb1.ipynb")
    newsession = j(resp)
    sid = newsession["id"]

    resp = await session_client.modify_type(sid, "console")
    changed = j(resp)
    assert changed["id"] == sid
    assert changed["type"] == "console"

    # Need to find a better solution to this.
    await session_client.cleanup()
    await jp_cleanup_subprocesses()


async def test_modify_kernel_name(session_client, jp_fetch, jp_cleanup_subprocesses, jp_serverapp):
    resp = await session_client.create("foo/nb1.ipynb")
    before = j(resp)
    sid = before["id"]

    resp = await session_client.modify_kernel_name(sid, before["kernel"]["name"])
    after = j(resp)
    assert after["id"] == sid
    assert after["path"] == before["path"]
    assert after["type"] == before["type"]
    assert after["kernel"]["id"] != before["kernel"]["id"]

    # check kernel list, to be sure previous kernel was cleaned up
    resp = await jp_fetch("api/kernels", method="GET")
    kernel_list = j(resp)
    after["kernel"].pop("last_activity")
    [k.pop("last_activity") for k in kernel_list]
    if not getattr(jp_serverapp.kernel_manager, "use_pending_kernels", False):
        assert kernel_list == [after["kernel"]]

    # Need to find a better solution to this.
    await session_client.cleanup()
    await jp_cleanup_subprocesses()


async def test_modify_kernel_id(session_client, jp_fetch, jp_cleanup_subprocesses, jp_serverapp):
    resp = await session_client.create("foo/nb1.ipynb")
    before = j(resp)
    sid = before["id"]

    # create a new kernel
    resp = await jp_fetch("api/kernels", method="POST", allow_nonstandard_methods=True)
    kernel = j(resp)

    # Attach our session to the existing kernel
    resp = await session_client.modify_kernel_id(sid, kernel["id"])
    after = j(resp)
    assert after["id"] == sid
    assert after["path"] == before["path"]
    assert after["type"] == before["type"]
    assert after["kernel"]["id"] != before["kernel"]["id"]
    assert after["kernel"]["id"] == kernel["id"]

    # check kernel list, to be sure previous kernel was cleaned up
    resp = await jp_fetch("api/kernels", method="GET")
    kernel_list = j(resp)

    kernel.pop("last_activity")
    [k.pop("last_activity") for k in kernel_list]
    if not getattr(jp_serverapp.kernel_manager, "use_pending_kernels", False):
        assert kernel_list == [kernel]

    # Need to find a better solution to this.
    await session_client.cleanup()
    await jp_cleanup_subprocesses()


async def test_restart_kernel(
    session_client, jp_base_url, jp_fetch, jp_ws_fetch, jp_cleanup_subprocesses
):
    # Create a session.
    resp = await session_client.create("foo/nb1.ipynb")
    assert resp.code == 201
    new_session = j(resp)
    assert "id" in new_session
    assert new_session["path"] == "foo/nb1.ipynb"
    assert new_session["type"] == "notebook"
    assert resp.headers["Location"] == url_path_join(
        jp_base_url, "/api/sessions/", new_session["id"]
    )

    kid = new_session["kernel"]["id"]

    # Get kernel info
    r = await jp_fetch("api", "kernels", kid, method="GET")
    model = json.loads(r.body.decode())
    assert model["connections"] == 0

    # Open a websocket connection.
    ws = await jp_ws_fetch("api", "kernels", kid, "channels")

    # Test that it was opened.
    r = await jp_fetch("api", "kernels", kid, method="GET")
    model = json.loads(r.body.decode())
    assert model["connections"] == 1

    # Restart kernel
    r = await jp_fetch(
        "api", "kernels", kid, "restart", method="POST", allow_nonstandard_methods=True
    )
    restarted_kernel = json.loads(r.body.decode())
    assert restarted_kernel["id"] == kid

    # Close/open websocket
    ws.close()
    # give it some time to close on the other side:
    for i in range(10):
        r = await jp_fetch("api", "kernels", kid, method="GET")
        model = json.loads(r.body.decode())
        if model["connections"] > 0:
            time.sleep(0.1)
        else:
            break

    r = await jp_fetch("api", "kernels", kid, method="GET")
    model = json.loads(r.body.decode())
    assert model["connections"] == 0

    # Open a websocket connection.
    await jp_ws_fetch("api", "kernels", kid, "channels")
    r = await jp_fetch("api", "kernels", kid, method="GET")
    model = json.loads(r.body.decode())
    assert model["connections"] == 1

    # Need to find a better solution to this.
    await session_client.cleanup()
    await jp_cleanup_subprocesses()

jupyter_server-1.13.1/jupyter_server/tests/services/sessions/test_manager.py000066400000000000000000000274241415445537200276550ustar00rootroot00000000000000
import pytest
from tornado import web
from traitlets import TraitError

from jupyter_server._tz import isoformat
from jupyter_server._tz import utcnow
from jupyter_server.services.contents.manager import ContentsManager
from jupyter_server.services.kernels.kernelmanager import MappingKernelManager
from jupyter_server.services.sessions.sessionmanager import SessionManager


class DummyKernel(object):
    def __init__(self, kernel_name="python"):
        self.kernel_name = kernel_name


dummy_date = utcnow()
dummy_date_s = isoformat(dummy_date)


class DummyMKM(MappingKernelManager):
    """MappingKernelManager interface that doesn't start kernels, for testing"""

    def __init__(self, *args, **kwargs):
        super(DummyMKM, self).__init__(*args, **kwargs)
        self.id_letters = iter(u"ABCDEFGHIJK")

    def _new_id(self):
        return next(self.id_letters)

    async def start_kernel(self, kernel_id=None, path=None, kernel_name="python", **kwargs):
        kernel_id = kernel_id or self._new_id()
        k = self._kernels[kernel_id] = DummyKernel(kernel_name=kernel_name)
        self._kernel_connections[kernel_id] = 0
        k.last_activity = dummy_date
        k.execution_state = "idle"
        return kernel_id

    async def shutdown_kernel(self, kernel_id, now=False):
        del self._kernels[kernel_id]


@pytest.fixture
def session_manager():
    return SessionManager(kernel_manager=DummyMKM(), contents_manager=ContentsManager())


async def create_multiple_sessions(session_manager, *kwargs_list):
    sessions = []
    for kwargs in kwargs_list:
        kwargs.setdefault("type", "notebook")
        session = await session_manager.create_session(**kwargs)
        sessions.append(session)
    return sessions


async def test_get_session(session_manager):
    session = await session_manager.create_session(
        path="/path/to/test.ipynb", kernel_name="bar", type="notebook"
    )
    session_id = session["id"]
    model = await session_manager.get_session(session_id=session_id)
    expected = {
        "id": session_id,
        "path": u"/path/to/test.ipynb",
        "notebook": {"path": u"/path/to/test.ipynb", "name": None},
        "type": "notebook",
        "name": None,
        "kernel": {
            "id": "A",
            "name": "bar",
            "connections": 0,
            "last_activity": dummy_date_s,
            "execution_state": "idle",
        },
    }
    assert model == expected


async def test_bad_get_session(session_manager):
    session = await session_manager.create_session(
        path="/path/to/test.ipynb", kernel_name="foo", type="notebook"
    )
    with pytest.raises(TypeError):
        await session_manager.get_session(bad_id=session["id"])


async def test_get_session_dead_kernel(session_manager):
    session = await session_manager.create_session(
        path="/path/to/1/test1.ipynb", kernel_name="python", type="notebook"
    )
    # Kill the kernel
    await session_manager.kernel_manager.shutdown_kernel(session["kernel"]["id"])
    with pytest.raises(KeyError):
        await session_manager.get_session(session_id=session["id"])
    # no session left
    listed = await session_manager.list_sessions()
    assert listed == []


async def test_list_session(session_manager):
    sessions = await create_multiple_sessions(
        session_manager,
        dict(path="/path/to/1/test1.ipynb", kernel_name="python"),
        dict(path="/path/to/2/test2.py", type="file", kernel_name="python"),
        dict(path="/path/to/3", name="foo", type="console", kernel_name="python"),
    )
    sessions = await session_manager.list_sessions()
    expected = [
        {
            "id": sessions[0]["id"],
            "path": u"/path/to/1/test1.ipynb",
            "type": "notebook",
            "notebook": {"path": u"/path/to/1/test1.ipynb", "name": None},
            "name": None,
            "kernel": {
                "id": "A",
                "name": "python",
                "connections": 0,
                "last_activity": dummy_date_s,
                "execution_state": "idle",
            },
        },
        {
            "id": sessions[1]["id"],
            "path": u"/path/to/2/test2.py",
            "type": "file",
            "name": None,
            "kernel": {
                "id": "B",
                "name": "python",
                "connections": 0,
                "last_activity": dummy_date_s,
                "execution_state": "idle",
            },
        },
        {
            "id": sessions[2]["id"],
            "path": u"/path/to/3",
            "type": "console",
            "name": "foo",
            "kernel": {
                "id": "C",
                "name": "python",
                "connections": 0,
                "last_activity": dummy_date_s,
                "execution_state": "idle",
            },
        },
    ]
    assert sessions == expected


async def test_list_sessions_dead_kernel(session_manager):
    sessions = await create_multiple_sessions(
        session_manager,
        dict(path="/path/to/1/test1.ipynb", kernel_name="python"),
        dict(path="/path/to/2/test2.ipynb", kernel_name="python"),
    )
    # kill one of the kernels
    await session_manager.kernel_manager.shutdown_kernel(sessions[0]["kernel"]["id"])
    listed = await session_manager.list_sessions()
    expected = [
        {
            "id": sessions[1]["id"],
            "path": u"/path/to/2/test2.ipynb",
            "type": "notebook",
            "name": None,
            "notebook": {"path": u"/path/to/2/test2.ipynb", "name": None},
            "kernel": {
                "id": "B",
                "name": "python",
                "connections": 0,
                "last_activity": dummy_date_s,
                "execution_state": "idle",
            },
        }
    ]
    assert listed == expected


async def test_update_session(session_manager):
    session = await session_manager.create_session(
        path="/path/to/test.ipynb", kernel_name="julia", type="notebook"
    )
    session_id = session["id"]
    await session_manager.update_session(session_id, path="/path/to/new_name.ipynb")
    model = await session_manager.get_session(session_id=session_id)
    expected = {
        "id": session_id,
        "path": u"/path/to/new_name.ipynb",
        "type": "notebook",
        "name": None,
        "notebook": {"path": u"/path/to/new_name.ipynb", "name": None},
        "kernel": {
            "id": "A",
            "name": "julia",
            "connections": 0,
            "last_activity": dummy_date_s,
            "execution_state": "idle",
        },
    }
    assert model == expected


async def test_bad_update_session(session_manager):
    # try to update a session with a bad keyword ~ raise error
    session = await session_manager.create_session(
        path="/path/to/test.ipynb", kernel_name="ir", type="notebook"
    )
    session_id = session["id"]
    with pytest.raises(TypeError):
        await session_manager.update_session(
            session_id=session_id, bad_kw="test.ipynb"
        )  # Bad keyword


async def test_delete_session(session_manager):
    sessions = await create_multiple_sessions(
        session_manager,
        dict(path="/path/to/1/test1.ipynb", kernel_name="python"),
        dict(path="/path/to/2/test2.ipynb", kernel_name="python"),
        dict(path="/path/to/3", name="foo", type="console", kernel_name="python"),
    )
    await session_manager.delete_session(sessions[1]["id"])
    new_sessions = await session_manager.list_sessions()
    expected = [
        {
            "id": sessions[0]["id"],
            "path": u"/path/to/1/test1.ipynb",
            "type": "notebook",
            "name": None,
            "notebook": {"path": u"/path/to/1/test1.ipynb", "name": None},
            "kernel": {
                "id": "A",
                "name": "python",
                "connections": 0,
                "last_activity": dummy_date_s,
                "execution_state": "idle",
            },
        },
        {
            "id": sessions[2]["id"],
            "type": "console",
            "path": u"/path/to/3",
            "name": "foo",
            "kernel": {
                "id": "C",
                "name": "python",
                "connections": 0,
                "last_activity": dummy_date_s,
                "execution_state": "idle",
            },
        },
    ]
    assert new_sessions == expected


async def test_bad_delete_session(session_manager):
    # try to delete a session that doesn't exist ~ raise error
    await session_manager.create_session(
        path="/path/to/test.ipynb", kernel_name="python", type="notebook"
    )
    with pytest.raises(TypeError):
        await session_manager.delete_session(bad_kwarg="23424")  # Bad keyword
    with pytest.raises(web.HTTPError):
        await session_manager.delete_session(session_id="23424")  # nonexistent


async def test_bad_database_filepath(jp_runtime_dir):
    kernel_manager = DummyMKM()

    # Try to write to a path that's a directory, not a file.
    path_id_directory = str(jp_runtime_dir)
    # Should raise an error because the path is a directory.
    with pytest.raises(TraitError) as err:
        SessionManager(
            kernel_manager=kernel_manager,
            contents_manager=ContentsManager(),
            database_filepath=str(path_id_directory),
        )

    # Try writing to file that's not a valid SQLite 3 database file.
    non_db_file = jp_runtime_dir.joinpath("non_db_file.db")
    non_db_file.write_bytes(b"this is a bad file")

    # Should raise an error because the file doesn't
    # start with an SQLite database file header.
    with pytest.raises(TraitError) as err:
        SessionManager(
            kernel_manager=kernel_manager,
            contents_manager=ContentsManager(),
            database_filepath=str(non_db_file),
        )


async def test_good_database_filepath(jp_runtime_dir):
    kernel_manager = DummyMKM()

    # Try writing to an empty file.
    empty_file = jp_runtime_dir.joinpath("empty.db")
    empty_file.write_bytes(b"")

    session_manager = SessionManager(
        kernel_manager=kernel_manager,
        contents_manager=ContentsManager(),
        database_filepath=str(empty_file),
    )

    await session_manager.create_session(
        path="/path/to/test.ipynb", kernel_name="python", type="notebook"
    )
    # Assert that the database file exists
    assert empty_file.exists()

    # Close the current session manager
    del session_manager

    # Try writing to a file that already exists.
    session_manager = SessionManager(
        kernel_manager=kernel_manager,
        contents_manager=ContentsManager(),
        database_filepath=str(empty_file),
    )

    assert session_manager.database_filepath == str(empty_file)


async def test_session_persistence(jp_runtime_dir):
    session_db_path = jp_runtime_dir.joinpath("test-session.db")
    # Kernel manager needs to persist.
    kernel_manager = DummyMKM()

    # Initialize a session and start a connection.
    # This should create the session database the first time.
session_manager = SessionManager( kernel_manager=kernel_manager, contents_manager=ContentsManager(), database_filepath=str(session_db_path), ) session = await session_manager.create_session( path="/path/to/test.ipynb", kernel_name="python", type="notebook" ) # Assert that the database file exists assert session_db_path.exists() with open(session_db_path, "rb") as f: header = f.read(100) assert header.startswith(b"SQLite format 3") # Close the current session manager del session_manager # Get a new session_manager session_manager = SessionManager( kernel_manager=kernel_manager, contents_manager=ContentsManager(), database_filepath=str(session_db_path), ) # Assert that the session database persists. session = await session_manager.get_session(session_id=session["id"]) jupyter_server-1.13.1/jupyter_server/tests/test_config_manager.py000066400000000000000000000034321415445537200255030ustar00rootroot00000000000000import json import os from jupyter_server.config_manager import BaseJSONConfigManager def test_json(tmp_path): tmpdir = str(tmp_path) root_data = dict(a=1, x=2, nest={"a": 1, "x": 2}) with open(os.path.join(tmpdir, "foo.json"), "w") as f: json.dump(root_data, f) # also make a foo.d/ directory with multiple json files os.makedirs(os.path.join(tmpdir, "foo.d")) with open(os.path.join(tmpdir, "foo.d", "a.json"), "w") as f: json.dump(dict(a=2, b=1, nest={"a": 2, "b": 1}), f) with open(os.path.join(tmpdir, "foo.d", "b.json"), "w") as f: json.dump(dict(a=3, b=2, c=3, nest={"a": 3, "b": 2, "c": 3}, only_in_b={"x": 1}), f) manager = BaseJSONConfigManager(config_dir=tmpdir, read_directory=False) data = manager.get("foo") assert "a" in data assert "x" in data assert "b" not in data assert "c" not in data assert data["a"] == 1 assert "x" in data["nest"] # if we write it out, it also shouldn't pick up the subdirectory manager.set("foo", data) data = manager.get("foo") assert data == root_data manager = BaseJSONConfigManager(config_dir=tmpdir, read_directory=True) data = manager.get("foo") assert "a" in data assert "b" in data assert "c" in data # files should be read in order foo.d/a.json foo.d/b.json foo.json assert data["a"] == 1 assert data["b"] == 2 assert data["c"] == 3 assert data["nest"]["a"] == 1 assert data["nest"]["b"] == 2 assert data["nest"]["c"] == 3 assert data["nest"]["x"] == 2 # when writing out, we don't want foo.d/*.json data to be included in the root foo.json manager.set("foo", data) manager = BaseJSONConfigManager(config_dir=tmpdir, read_directory=False) data = manager.get("foo") assert data == root_data jupyter_server-1.13.1/jupyter_server/tests/test_files.py000066400000000000000000000107071415445537200236510ustar00rootroot00000000000000import os from pathlib import Path import pytest import tornado from nbformat import writes from nbformat.v4 import new_code_cell from nbformat.v4 import new_markdown_cell from nbformat.v4 import new_notebook from nbformat.v4 import new_output from .utils import expected_http_error @pytest.fixture( params=[[False, ["å b"]], [False, ["å b", "ç.
d"]], [True, [".å b"]], [True, ["å b", ".ç d"]]] ) def maybe_hidden(request): return request.param async def fetch_expect_200(jp_fetch, *path_parts): r = await jp_fetch("files", *path_parts, method="GET") assert r.body.decode() == path_parts[-1], (path_parts, r.body) async def fetch_expect_404(jp_fetch, *path_parts): with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch("files", *path_parts, method="GET") assert expected_http_error(e, 404), [path_parts, e] async def test_hidden_files(jp_fetch, jp_serverapp, jp_root_dir, maybe_hidden): is_hidden, path_parts = maybe_hidden path = Path(jp_root_dir, *path_parts) path.mkdir(parents=True, exist_ok=True) foos = ["foo", ".foo"] for foo in foos: (path / foo).write_text(foo) if is_hidden: for foo in foos: await fetch_expect_404(jp_fetch, *path_parts, foo) else: await fetch_expect_404(jp_fetch, *path_parts, ".foo") await fetch_expect_200(jp_fetch, *path_parts, "foo") jp_serverapp.contents_manager.allow_hidden = True for foo in foos: await fetch_expect_200(jp_fetch, *path_parts, foo) async def test_contents_manager(jp_fetch, jp_serverapp, jp_root_dir): """make sure ContentsManager returns right files (ipynb, bin, txt).""" nb = new_notebook( cells=[ new_markdown_cell(u"Created by test ³"), new_code_cell( "print(2*6)", outputs=[ new_output("stream", text="12"), ], ), ] ) jp_root_dir.joinpath("testnb.ipynb").write_text(writes(nb, version=4), encoding="utf-8") jp_root_dir.joinpath("test.bin").write_bytes(b"\xff" + os.urandom(5)) jp_root_dir.joinpath("test.txt").write_text("foobar") r = await jp_fetch("files/testnb.ipynb", method="GET") assert r.code == 200 assert "print(2*6)" in r.body.decode("utf-8") r = await jp_fetch("files/test.bin", method="GET") assert r.code == 200 assert r.headers["content-type"] == "application/octet-stream" assert r.body[:1] == b"\xff" assert len(r.body) == 6 r = await jp_fetch("files/test.txt", method="GET") assert r.code == 200 assert r.headers["content-type"] == "text/plain; charset=UTF-8" assert r.body.decode() == "foobar" async def test_download(jp_fetch, jp_serverapp, jp_root_dir): text = "hello" jp_root_dir.joinpath("test.txt").write_text(text) r = await jp_fetch("files", "test.txt", method="GET") disposition = r.headers.get("Content-Disposition", "") assert "attachment" not in disposition r = await jp_fetch("files", "test.txt", method="GET", params={"download": True}) disposition = r.headers.get("Content-Disposition", "") assert "attachment" in disposition assert "filename*=utf-8''test.txt" in disposition async def test_old_files_redirect(jp_fetch, jp_serverapp, jp_root_dir): """pre-2.0 'files/' prefixed links are properly redirected""" jp_root_dir.joinpath("files").mkdir(parents=True, exist_ok=True) jp_root_dir.joinpath("sub", "files").mkdir(parents=True, exist_ok=True) for prefix in ("", "sub"): jp_root_dir.joinpath(prefix, "files", "f1.txt").write_text(prefix + "/files/f1") jp_root_dir.joinpath(prefix, "files", "f2.txt").write_text(prefix + "/files/f2") jp_root_dir.joinpath(prefix, "f2.txt").write_text(prefix + "/f2") jp_root_dir.joinpath(prefix, "f3.txt").write_text(prefix + "/f3") # These depend on the tree handlers # # def test_download(self): # rootdir = self.root_dir # text = 'hello' # with open(pjoin(rootdir, 'test.txt'), 'w') as f: # f.write(text) # r = self.request('GET', 'files/test.txt') # disposition = r.headers.get('Content-Disposition', '') # self.assertNotIn('attachment', disposition) # r = self.request('GET', 'files/test.txt?download=1') # disposition = 
r.headers.get('Content-Disposition', '') # self.assertIn('attachment', disposition) # self.assertIn("filename*=utf-8''test.txt", disposition) jupyter_server-1.13.1/jupyter_server/tests/test_gateway.py000066400000000000000000000354101415445537200242060ustar00rootroot00000000000000"""Test GatewayClient""" import json import os import uuid from datetime import datetime from io import StringIO from unittest.mock import patch import pytest import tornado from tornado.httpclient import HTTPRequest from tornado.httpclient import HTTPResponse from tornado.web import HTTPError from .utils import expected_http_error from jupyter_server.gateway.managers import GatewayClient from jupyter_server.utils import ensure_async def generate_kernelspec(name): argv_stanza = ["python", "-m", "ipykernel_launcher", "-f", "{connection_file}"] spec_stanza = { "spec": { "argv": argv_stanza, "env": {}, "display_name": name, "language": "python", "interrupt_mode": "signal", "metadata": {}, } } kernelspec_stanza = {"name": name, "spec": spec_stanza, "resources": {}} return kernelspec_stanza # We'll mock up two kernelspecs - kspec_foo and kspec_bar kernelspecs = { "default": "kspec_foo", "kernelspecs": { "kspec_foo": generate_kernelspec("kspec_foo"), "kspec_bar": generate_kernelspec("kspec_bar"), }, } # maintain a dictionary of expected running kernels. Key = kernel_id, Value = model. running_kernels = dict() def generate_model(name): """Generate a mocked kernel model. Caller is responsible for adding model to running_kernels dictionary.""" dt = datetime.utcnow().isoformat() + "Z" kernel_id = str(uuid.uuid4()) model = { "id": kernel_id, "name": name, "last_activity": str(dt), "execution_state": "idle", "connections": 1, } return model async def mock_gateway_request(url, **kwargs): method = "GET" if kwargs["method"]: method = kwargs["method"] request = HTTPRequest(url=url, **kwargs) endpoint = str(url) # Fetch all kernelspecs if endpoint.endswith("/api/kernelspecs") and method == "GET": response_buf = StringIO(json.dumps(kernelspecs)) response = await ensure_async(HTTPResponse(request, 200, buffer=response_buf)) return response # Fetch named kernelspec if endpoint.rfind("/api/kernelspecs/") >= 0 and method == "GET": requested_kernelspec = endpoint.rpartition("/")[2] kspecs = kernelspecs.get("kernelspecs") if requested_kernelspec in kspecs: response_buf = StringIO(json.dumps(kspecs.get(requested_kernelspec))) response = await ensure_async(HTTPResponse(request, 200, buffer=response_buf)) return response else: raise HTTPError(404, message="Kernelspec does not exist: %s" % requested_kernelspec) # Create kernel if endpoint.endswith("/api/kernels") and method == "POST": json_body = json.loads(kwargs["body"]) name = json_body.get("name") env = json_body.get("env") kspec_name = env.get("KERNEL_KSPEC_NAME") assert name == kspec_name # Ensure that KERNEL_ env values get propagated model = generate_model(name) running_kernels[model.get("id")] = model # Register model as a running kernel response_buf = StringIO(json.dumps(model)) response = await ensure_async(HTTPResponse(request, 201, buffer=response_buf)) return response # Fetch list of running kernels if endpoint.endswith("/api/kernels") and method == "GET": kernels = [] for kernel_id in running_kernels.keys(): model = running_kernels.get(kernel_id) kernels.append(model) response_buf = StringIO(json.dumps(kernels)) response = await ensure_async(HTTPResponse(request, 200, buffer=response_buf)) return response # Interrupt or restart existing kernel if endpoint.rfind("/api/kernels/") 
>= 0 and method == "POST": requested_kernel_id, sep, action = endpoint.rpartition("/api/kernels/")[2].rpartition("/") if action == "interrupt": if requested_kernel_id in running_kernels: response = await ensure_async(HTTPResponse(request, 204)) return response else: raise HTTPError(404, message="Kernel does not exist: %s" % requested_kernel_id) elif action == "restart": if requested_kernel_id in running_kernels: response_buf = StringIO(json.dumps(running_kernels.get(requested_kernel_id))) response = await ensure_async(HTTPResponse(request, 204, buffer=response_buf)) return response else: raise HTTPError(404, message="Kernel does not exist: %s" % requested_kernel_id) else: raise HTTPError(404, message="Bad action detected: %s" % action) # Shutdown existing kernel if endpoint.rfind("/api/kernels/") >= 0 and method == "DELETE": requested_kernel_id = endpoint.rpartition("/")[2] running_kernels.pop( requested_kernel_id ) # Simulate shutdown by removing kernel from running set response = await ensure_async(HTTPResponse(request, 204)) return response # Fetch existing kernel if endpoint.rfind("/api/kernels/") >= 0 and method == "GET": requested_kernel_id = endpoint.rpartition("/")[2] if requested_kernel_id in running_kernels: response_buf = StringIO(json.dumps(running_kernels.get(requested_kernel_id))) response = await ensure_async(HTTPResponse(request, 200, buffer=response_buf)) return response else: raise HTTPError(404, message="Kernel does not exist: %s" % requested_kernel_id) mocked_gateway = patch("jupyter_server.gateway.managers.gateway_request", mock_gateway_request) mock_gateway_url = "http://mock-gateway-server:8889" mock_http_user = "alice" @pytest.fixture def init_gateway(monkeypatch): """Initializes the server for use as a gateway client. """ # Clear the singleton first since previous tests may not have used a gateway. 
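# The JUPYTER_GATEWAY_* environment variables set below are read when the
# GatewayClient singleton is (re)created, so clearing the instance first
# guarantees they actually take effect for this test.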
GatewayClient.clear_instance() monkeypatch.setenv("JUPYTER_GATEWAY_URL", mock_gateway_url) monkeypatch.setenv("JUPYTER_GATEWAY_HTTP_USER", mock_http_user) monkeypatch.setenv("JUPYTER_GATEWAY_REQUEST_TIMEOUT", "44.4") monkeypatch.setenv("JUPYTER_GATEWAY_CONNECT_TIMEOUT", "44.4") yield GatewayClient.clear_instance() async def test_gateway_env_options(init_gateway, jp_serverapp): assert jp_serverapp.gateway_config.gateway_enabled is True assert jp_serverapp.gateway_config.url == mock_gateway_url assert jp_serverapp.gateway_config.http_user == mock_http_user assert ( jp_serverapp.gateway_config.connect_timeout == jp_serverapp.gateway_config.request_timeout ) assert jp_serverapp.gateway_config.connect_timeout == 44.4 GatewayClient.instance().init_static_args() assert GatewayClient.instance().KERNEL_LAUNCH_TIMEOUT == int( jp_serverapp.gateway_config.request_timeout ) async def test_gateway_cli_options(jp_configurable_serverapp): argv = [ "--gateway-url=" + mock_gateway_url, "--GatewayClient.http_user=" + mock_http_user, "--GatewayClient.connect_timeout=44.4", "--GatewayClient.request_timeout=96.0", ] GatewayClient.clear_instance() app = jp_configurable_serverapp(argv=argv) assert app.gateway_config.gateway_enabled is True assert app.gateway_config.url == mock_gateway_url assert app.gateway_config.http_user == mock_http_user assert app.gateway_config.connect_timeout == 44.4 assert app.gateway_config.request_timeout == 96.0 GatewayClient.instance().init_static_args() assert ( GatewayClient.instance().KERNEL_LAUNCH_TIMEOUT == 96 ) # Ensure KLT gets set from request-timeout GatewayClient.clear_instance() async def test_gateway_class_mappings(init_gateway, jp_serverapp): # Ensure appropriate class mappings are in place. assert jp_serverapp.kernel_manager_class.__name__ == "GatewayMappingKernelManager" assert jp_serverapp.session_manager_class.__name__ == "GatewaySessionManager" assert jp_serverapp.kernel_spec_manager_class.__name__ == "GatewayKernelSpecManager" async def test_gateway_get_kernelspecs(init_gateway, jp_fetch): # Validate that kernelspecs come from gateway. with mocked_gateway: r = await jp_fetch("api", "kernelspecs", method="GET") assert r.code == 200 content = json.loads(r.body.decode("utf-8")) kspecs = content.get("kernelspecs") assert len(kspecs) == 2 assert kspecs.get("kspec_bar").get("name") == "kspec_bar" async def test_gateway_get_named_kernelspec(init_gateway, jp_fetch): # Validate that a specific kernelspec can be retrieved from gateway (and an invalid spec can't) with mocked_gateway: r = await jp_fetch("api", "kernelspecs", "kspec_foo", method="GET") assert r.code == 200 kspec_foo = json.loads(r.body.decode("utf-8")) assert kspec_foo.get("name") == "kspec_foo" with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch("api", "kernelspecs", "no_such_spec", method="GET") assert expected_http_error(e, 404) async def test_gateway_session_lifecycle(init_gateway, jp_root_dir, jp_fetch): # Validate session lifecycle functions; create and delete. 
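# Each step below goes through the helper functions defined later in this
# module (see "Test methods below..."), which issue HTTP requests against
# the mocked gateway.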
# create session_id, kernel_id = await create_session(jp_root_dir, jp_fetch, "kspec_foo") # ensure kernel still considered running assert await is_kernel_running(jp_fetch, kernel_id) is True # interrupt await interrupt_kernel(jp_fetch, kernel_id) # ensure kernel still considered running assert await is_kernel_running(jp_fetch, kernel_id) is True # restart await restart_kernel(jp_fetch, kernel_id) # ensure kernel still considered running assert await is_kernel_running(jp_fetch, kernel_id) is True # delete await delete_session(jp_fetch, session_id) assert await is_kernel_running(jp_fetch, kernel_id) is False async def test_gateway_kernel_lifecycle(init_gateway, jp_fetch): # Validate kernel lifecycle functions; create, interrupt, restart and delete. # create kernel_id = await create_kernel(jp_fetch, "kspec_bar") # ensure kernel still considered running assert await is_kernel_running(jp_fetch, kernel_id) is True # interrupt await interrupt_kernel(jp_fetch, kernel_id) # ensure kernel still considered running assert await is_kernel_running(jp_fetch, kernel_id) is True # restart await restart_kernel(jp_fetch, kernel_id) # ensure kernel still considered running assert await is_kernel_running(jp_fetch, kernel_id) is True # delete await delete_kernel(jp_fetch, kernel_id) assert await is_kernel_running(jp_fetch, kernel_id) is False # # Test methods below... # async def create_session(root_dir, jp_fetch, kernel_name): """Creates a session for a kernel. The session is created against the server which then uses the gateway for kernel management. """ with mocked_gateway: nb_path = root_dir / "testgw.ipynb" body = json.dumps( {"path": str(nb_path), "type": "notebook", "kernel": {"name": kernel_name}} ) # add a KERNEL_ value to the current env and we'll ensure that that value exists in the mocked method os.environ["KERNEL_KSPEC_NAME"] = kernel_name # Create the kernel... (also tests get_kernel) r = await jp_fetch("api", "sessions", method="POST", body=body) assert r.code == 201 model = json.loads(r.body.decode("utf-8")) assert model.get("path") == str(nb_path) kernel_id = model.get("kernel").get("id") # ensure it's in the running_kernels and name matches.
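# The mocked gateway stored this kernel's model in running_kernels when it
# handled the POST /api/kernels request, so the two views must agree.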
running_kernel = running_kernels.get(kernel_id) assert kernel_id == running_kernel.get("id") assert model.get("kernel").get("name") == running_kernel.get("name") session_id = model.get("id") # restore env os.environ.pop("KERNEL_KSPEC_NAME") return session_id, kernel_id async def delete_session(jp_fetch, session_id): """Deletes a session corresponding to the given session id.""" with mocked_gateway: # Delete the session (and kernel) r = await jp_fetch("api", "sessions", session_id, method="DELETE") assert r.code == 204 assert r.reason == "No Content" async def is_kernel_running(jp_fetch, kernel_id): """Issues request to get the set of running kernels""" with mocked_gateway: # Get list of running kernels r = await jp_fetch("api", "kernels", method="GET") assert r.code == 200 kernels = json.loads(r.body.decode("utf-8")) assert len(kernels) == len(running_kernels) for model in kernels: if model.get("id") == kernel_id: return True return False async def create_kernel(jp_fetch, kernel_name): """Issues request to create a kernel with the given name""" with mocked_gateway: body = json.dumps({"name": kernel_name}) # add a KERNEL_ value to the current env and we'll ensure that that value exists in the mocked method os.environ["KERNEL_KSPEC_NAME"] = kernel_name r = await jp_fetch("api", "kernels", method="POST", body=body) assert r.code == 201 model = json.loads(r.body.decode("utf-8")) kernel_id = model.get("id") # ensure it's in the running_kernels and name matches. running_kernel = running_kernels.get(kernel_id) assert kernel_id == running_kernel.get("id") assert model.get("name") == kernel_name # restore env os.environ.pop("KERNEL_KSPEC_NAME") return kernel_id async def interrupt_kernel(jp_fetch, kernel_id): """Issues request to interrupt the given kernel""" with mocked_gateway: r = await jp_fetch( "api", "kernels", kernel_id, "interrupt", method="POST", allow_nonstandard_methods=True ) assert r.code == 204 assert r.reason == "No Content" async def restart_kernel(jp_fetch, kernel_id): """Issues request to restart the given kernel""" with mocked_gateway: r = await jp_fetch( "api", "kernels", kernel_id, "restart", method="POST", allow_nonstandard_methods=True ) assert r.code == 200 model = json.loads(r.body.decode("utf-8")) restarted_kernel_id = model.get("id") # ensure it's in the running_kernels and name matches.
running_kernel = running_kernels.get(restarted_kernel_id) assert restarted_kernel_id == running_kernel.get("id") assert model.get("name") == running_kernel.get("name") async def delete_kernel(jp_fetch, kernel_id): """Deletes kernel corresponding to the given kernel id.""" with mocked_gateway: # Delete the kernel r = await jp_fetch("api", "kernels", kernel_id, method="DELETE") assert r.code == 204 assert r.reason == "No Content" jupyter_server-1.13.1/jupyter_server/tests/test_paths.py000066400000000000000000000032121415445537200236610ustar00rootroot00000000000000import re import pytest import tornado from jupyter_server.base.handlers import path_regex from jupyter_server.utils import url_path_join # build regexps that tornado uses: path_pat = re.compile("^" + "/x%s" % path_regex + "$") def test_path_regex(): for path in ( "/x", "/x/", "/x/foo", "/x/foo.ipynb", "/x/foo/bar", "/x/foo/bar.txt", ): assert re.match(path_pat, path) def test_path_regex_bad(): for path in ( "/xfoo", "/xfoo/", "/xfoo/bar", "/xfoo/bar/", "/x/foo/bar/", "/x//foo", "/y", "/y/x/foo", ): assert re.match(path_pat, path) is None @pytest.mark.parametrize( "uri,expected", [ ("/notebooks/mynotebook/", "/notebooks/mynotebook"), ("////foo///", "/foo"), ("//example.com/", "/example.com"), ("/has/param/?hasparam=true", "/has/param?hasparam=true"), ], ) async def test_trailing_slash( jp_ensure_app_fixture, uri, expected, http_server_client, jp_auth_header, jp_base_url ): # http_server_client raises an exception when follow_redirects=False with pytest.raises(tornado.httpclient.HTTPClientError) as err: await http_server_client.fetch( url_path_join(jp_base_url, uri), headers=jp_auth_header, request_timeout=20, follow_redirects=False, ) # Capture the response from the raised exception value.
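# With follow_redirects=False, tornado surfaces the 302 as an HTTPClientError;
# the redirect target is carried in the Location header checked below.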
response = err.value.response assert response.code == 302 assert "Location" in response.headers assert response.headers["Location"] == url_path_join(jp_base_url, expected) jupyter_server-1.13.1/jupyter_server/tests/test_serialize.py000066400000000000000000000013741415445537200245360ustar00rootroot00000000000000"""Test serialize/deserialize messages with buffers""" import os from jupyter_client.session import Session from jupyter_server.base.zmqhandlers import deserialize_binary_message from jupyter_server.base.zmqhandlers import serialize_binary_message def test_serialize_binary(): s = Session() msg = s.msg("data_pub", content={"a": "b"}) msg["buffers"] = [memoryview(os.urandom(3)) for i in range(3)] bmsg = serialize_binary_message(msg) assert isinstance(bmsg, bytes) def test_deserialize_binary(): s = Session() msg = s.msg("data_pub", content={"a": "b"}) msg["buffers"] = [memoryview(os.urandom(2)) for i in range(3)] bmsg = serialize_binary_message(msg) msg2 = deserialize_binary_message(bmsg) assert msg2 == msg jupyter_server-1.13.1/jupyter_server/tests/test_serverapp.py000066400000000000000000000274601415445537200245600ustar00rootroot00000000000000import getpass import logging import os import pathlib from unittest.mock import patch import pytest from jupyter_core.application import NoStart from traitlets import TraitError from traitlets.tests.utils import check_help_all_output from jupyter_server.auth.security import passwd_check from jupyter_server.serverapp import JupyterPasswordApp from jupyter_server.serverapp import list_running_servers from jupyter_server.serverapp import ServerApp def test_help_output(): """jupyter server --help-all works""" check_help_all_output("jupyter_server") def test_server_info_file(tmp_path, jp_configurable_serverapp): app = jp_configurable_serverapp(log=logging.getLogger()) app.write_server_info_file() servers = list(list_running_servers(app.runtime_dir)) assert len(servers) == 1 sinfo = servers[0] assert sinfo["port"] == app.port assert sinfo["url"] == app.connection_url assert sinfo["version"] == app.version app.remove_server_info_file() assert list(list_running_servers(app.runtime_dir)) == [] app.remove_server_info_file() def test_root_dir(tmp_path, jp_configurable_serverapp): app = jp_configurable_serverapp(root_dir=str(tmp_path)) assert app.root_dir == str(tmp_path) # Build a list of invalid paths @pytest.fixture(params=[("notebooks",), ("root", "dir", "is", "missing"), ("test.txt",)]) def invalid_root_dir(tmp_path, request): path = tmp_path.joinpath(*request.param) # If the path is a file, create it. if os.path.splitext(str(path))[1] != "": path.write_text("") return str(path) def test_invalid_root_dir(invalid_root_dir, jp_configurable_serverapp): app = jp_configurable_serverapp() with pytest.raises(TraitError): app.root_dir = invalid_root_dir @pytest.fixture(params=[("/",), ("first-level",), ("first-level", "second-level")]) def valid_root_dir(tmp_path, request): path = tmp_path.joinpath(*request.param) if not path.exists(): # Create path in temporary directory path.mkdir(parents=True) return str(path) def test_valid_root_dir(valid_root_dir, jp_configurable_serverapp): app = jp_configurable_serverapp(root_dir=valid_root_dir) root_dir = valid_root_dir # If nested path, the last slash should # be stripped by the root_dir trait.
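# "/" is the only root_dir where stripping the trailing slash would leave an
# empty string, so it is excluded from the rstrip below.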
if root_dir != "/": root_dir = valid_root_dir.rstrip("/") assert app.root_dir == root_dir def test_generate_config(tmp_path, jp_configurable_serverapp): app = jp_configurable_serverapp(config_dir=str(tmp_path)) app.initialize(["--generate-config", "--allow-root"]) with pytest.raises(NoStart): app.start() assert tmp_path.joinpath("jupyter_server_config.py").exists() def test_server_password(tmp_path, jp_configurable_serverapp): password = "secret" with patch.dict("os.environ", {"JUPYTER_CONFIG_DIR": str(tmp_path)}), patch.object( getpass, "getpass", return_value=password ): app = JupyterPasswordApp(log_level=logging.ERROR) app.initialize([]) app.start() sv = jp_configurable_serverapp() sv.load_config_file() assert sv.password != "" passwd_check(sv.password, password) def test_list_running_servers(jp_serverapp, jp_web_app): servers = list(list_running_servers(jp_serverapp.runtime_dir)) assert len(servers) >= 1 @pytest.fixture def prefix_path(jp_root_dir, tmp_path): """If a given path is prefixed with the literal strings `/jp_root_dir` or `/tmp_path`, replace those strings with these fixtures. Returns a pathlib Path object. """ def _inner(rawpath): path = pathlib.PurePosixPath(rawpath) if rawpath.startswith("/jp_root_dir"): path = jp_root_dir.joinpath(*path.parts[2:]) elif rawpath.startswith("/tmp_path"): path = tmp_path.joinpath(*path.parts[2:]) return pathlib.Path(path) return _inner @pytest.mark.parametrize( "root_dir,file_to_run,expected_output", [ (None, "notebook.ipynb", "notebook.ipynb"), (None, "/tmp_path/path/to/notebook.ipynb", "notebook.ipynb"), ("/jp_root_dir", "/tmp_path/path/to/notebook.ipynb", SystemExit), ("/tmp_path", "/tmp_path/path/to/notebook.ipynb", "path/to/notebook.ipynb"), ("/jp_root_dir", "notebook.ipynb", "notebook.ipynb"), ("/jp_root_dir", "path/to/notebook.ipynb", "path/to/notebook.ipynb"), ], ) def test_resolve_file_to_run_and_root_dir(prefix_path, root_dir, file_to_run, expected_output): # Verify that the Singleton instance is cleared before the test runs. ServerApp.clear_instance() # Setup the file_to_run path, in case the server checks # if the directory exists before initializing the server. file_to_run = prefix_path(file_to_run) if file_to_run.is_absolute(): file_to_run.parent.mkdir(parents=True, exist_ok=True) kwargs = {"file_to_run": str(file_to_run)} # Setup the root_dir path, in case the server checks # if the directory exists before initializing the server. if root_dir: root_dir = prefix_path(root_dir) if root_dir.is_absolute(): root_dir.parent.mkdir(parents=True, exist_ok=True) kwargs["root_dir"] = str(root_dir) # Create the notebook in the given location serverapp = ServerApp.instance(**kwargs) if expected_output is SystemExit: with pytest.raises(SystemExit): serverapp._resolve_file_to_run_and_root_dir() else: relpath = serverapp._resolve_file_to_run_and_root_dir() assert relpath == str(pathlib.Path(expected_output)) # Clear the singleton instance after each run. ServerApp.clear_instance() # Test the URLs returned by ServerApp. The `` piece # in urls shown below will be replaced with the token # generated by the ServerApp on instance creation. @pytest.mark.parametrize( "config,public_url,local_url,connection_url", [ # Token is hidden when configured. 
( {"token": "test"}, "http://localhost:8888/?token=...", "http://127.0.0.1:8888/?token=...", "http://localhost:8888/", ), # Verify port number has changed ( {"port": 9999}, "http://localhost:9999/?token=", "http://127.0.0.1:9999/?token=", "http://localhost:9999/", ), ( {"ip": "1.1.1.1"}, "http://1.1.1.1:8888/?token=", "http://127.0.0.1:8888/?token=", "http://1.1.1.1:8888/", ), # Verify that HTTPS is returned when certfile is given ( {"certfile": "/path/to/dummy/file"}, "https://localhost:8888/?token=", "https://127.0.0.1:8888/?token=", "https://localhost:8888/", ), # Verify changed port and a custom display URL ( {"port": 9999, "custom_display_url": "http://test.org"}, "http://test.org/?token=", "http://127.0.0.1:9999/?token=", "http://localhost:9999/", ), ( {"base_url": "/", "default_url": "/test/"}, "http://localhost:8888/test/?token=", "http://127.0.0.1:8888/test/?token=", "http://localhost:8888/", ), # Verify unix socket URLs are handled properly ( {"sock": "/tmp/jp-test.sock"}, "http+unix://%2Ftmp%2Fjp-test.sock/?token=", "http+unix://%2Ftmp%2Fjp-test.sock/?token=", "http+unix://%2Ftmp%2Fjp-test.sock/", ), ( {"base_url": "/", "default_url": "/test/", "sock": "/tmp/jp-test.sock"}, "http+unix://%2Ftmp%2Fjp-test.sock/test/?token=", "http+unix://%2Ftmp%2Fjp-test.sock/test/?token=", "http+unix://%2Ftmp%2Fjp-test.sock/", ), ], ) def test_urls(config, public_url, local_url, connection_url): # Verify we're working with a clean instance. ServerApp.clear_instance() serverapp = ServerApp.instance(**config) # If a token is generated (not set by config), update # expected_url with token. if serverapp._token_generated: public_url = public_url.replace("", serverapp.token) local_url = local_url.replace("", serverapp.token) connection_url = connection_url.replace("", serverapp.token) assert serverapp.public_url == public_url assert serverapp.local_url == local_url assert serverapp.connection_url == connection_url # Cleanup singleton after test. 
ServerApp.clear_instance() # Preferred dir tests # ---------------------------------------------------------------------------- def test_valid_preferred_dir(tmp_path, jp_configurable_serverapp): path = str(tmp_path) app = jp_configurable_serverapp(root_dir=path, preferred_dir=path) assert app.root_dir == path assert app.preferred_dir == path assert app.root_dir == app.preferred_dir def test_valid_preferred_dir_is_root_subdir(tmp_path, jp_configurable_serverapp): path = str(tmp_path) path_subdir = str(tmp_path / "subdir") os.makedirs(path_subdir, exist_ok=True) app = jp_configurable_serverapp(root_dir=path, preferred_dir=path_subdir) assert app.root_dir == path assert app.preferred_dir == path_subdir assert app.preferred_dir.startswith(app.root_dir) def test_valid_preferred_dir_does_not_exist(tmp_path, jp_configurable_serverapp): path = str(tmp_path) path_subdir = str(tmp_path / "subdir") with pytest.raises(TraitError) as error: app = jp_configurable_serverapp(root_dir=path, preferred_dir=path_subdir) assert "No such preferred dir:" in str(error) def test_invalid_preferred_dir_does_not_exist(tmp_path, jp_configurable_serverapp): path = str(tmp_path) path_subdir = str(tmp_path / "subdir") with pytest.raises(TraitError) as error: app = jp_configurable_serverapp(root_dir=path, preferred_dir=path_subdir) assert "No such preferred dir:" in str(error) def test_invalid_preferred_dir_does_not_exist_set(tmp_path, jp_configurable_serverapp): path = str(tmp_path) path_subdir = str(tmp_path / "subdir") app = jp_configurable_serverapp(root_dir=path) with pytest.raises(TraitError) as error: app.preferred_dir = path_subdir assert "No such preferred dir:" in str(error) def test_invalid_preferred_dir_not_root_subdir(tmp_path, jp_configurable_serverapp): path = str(tmp_path / "subdir") os.makedirs(path, exist_ok=True) not_subdir_path = str(tmp_path) with pytest.raises(TraitError) as error: app = jp_configurable_serverapp(root_dir=path, preferred_dir=not_subdir_path) assert "preferred_dir must be equal or a subdir of root_dir:" in str(error) def test_invalid_preferred_dir_not_root_subdir_set(tmp_path, jp_configurable_serverapp): path = str(tmp_path / "subdir") os.makedirs(path, exist_ok=True) not_subdir_path = str(tmp_path) app = jp_configurable_serverapp(root_dir=path) with pytest.raises(TraitError) as error: app.preferred_dir = not_subdir_path assert "preferred_dir must be equal or a subdir of root_dir:" in str(error) def test_observed_root_dir_updates_preferred_dir(tmp_path, jp_configurable_serverapp): path = str(tmp_path) new_path = str(tmp_path / "subdir") os.makedirs(new_path, exist_ok=True) app = jp_configurable_serverapp(root_dir=path, preferred_dir=path) app.root_dir = new_path assert app.preferred_dir == new_path def test_observed_root_dir_does_not_update_preferred_dir(tmp_path, jp_configurable_serverapp): path = str(tmp_path) new_path = str(tmp_path.parent) app = jp_configurable_serverapp(root_dir=path, preferred_dir=path) app.root_dir = new_path assert app.preferred_dir == path jupyter_server-1.13.1/jupyter_server/tests/test_terminal.py000066400000000000000000000104641415445537200243620ustar00rootroot00000000000000import asyncio import json import os import shutil import pytest from tornado.httpclient import HTTPClientError from traitlets.config import Config @pytest.fixture def terminal_path(tmp_path): subdir = tmp_path.joinpath("terminal_path") subdir.mkdir() yield subdir shutil.rmtree(str(subdir), ignore_errors=True) CULL_TIMEOUT = 10 CULL_INTERVAL = 3 @pytest.fixture def jp_server_config(): 
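# Returning this Config from the jp_server_config fixture runs the tests in
# this module against a server whose TerminalManager uses the aggressive
# culling settings defined above.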
return Config( { "ServerApp": { "TerminalManager": { "cull_inactive_timeout": CULL_TIMEOUT, "cull_interval": CULL_INTERVAL, } } } ) async def test_no_terminals(jp_fetch): resp_list = await jp_fetch( "api", "terminals", method="GET", allow_nonstandard_methods=True, ) data = json.loads(resp_list.body.decode()) assert len(data) == 0 async def test_terminal_create(jp_fetch, jp_cleanup_subprocesses): resp = await jp_fetch( "api", "terminals", method="POST", allow_nonstandard_methods=True, ) term = json.loads(resp.body.decode()) assert term["name"] == "1" resp_list = await jp_fetch( "api", "terminals", method="GET", allow_nonstandard_methods=True, ) data = json.loads(resp_list.body.decode()) assert len(data) == 1 assert data[0] == term await jp_cleanup_subprocesses() async def test_terminal_create_with_kwargs( jp_fetch, jp_ws_fetch, terminal_path, jp_cleanup_subprocesses ): resp_create = await jp_fetch( "api", "terminals", method="POST", body=json.dumps({"cwd": str(terminal_path)}), allow_nonstandard_methods=True, ) data = json.loads(resp_create.body.decode()) term_name = data["name"] resp_get = await jp_fetch( "api", "terminals", term_name, method="GET", allow_nonstandard_methods=True, ) data = json.loads(resp_get.body.decode()) assert data["name"] == term_name await jp_cleanup_subprocesses() async def test_terminal_create_with_cwd( jp_fetch, jp_ws_fetch, terminal_path, jp_cleanup_subprocesses ): resp = await jp_fetch( "api", "terminals", method="POST", body=json.dumps({"cwd": str(terminal_path)}), allow_nonstandard_methods=True, ) data = json.loads(resp.body.decode()) term_name = data["name"] ws = await jp_ws_fetch("terminals", "websocket", term_name) ws.write_message(json.dumps(["stdin", "pwd\r\n"])) message_stdout = "" while True: try: message = await asyncio.wait_for(ws.read_message(), timeout=5.0) except asyncio.TimeoutError: break message = json.loads(message) if message[0] == "stdout": message_stdout += message[1] ws.close() assert os.path.basename(terminal_path) in message_stdout await jp_cleanup_subprocesses() async def test_culling_config(jp_server_config, jp_configurable_serverapp): terminal_mgr_config = jp_configurable_serverapp().config.ServerApp.TerminalManager assert terminal_mgr_config.cull_inactive_timeout == CULL_TIMEOUT assert terminal_mgr_config.cull_interval == CULL_INTERVAL terminal_mgr_settings = jp_configurable_serverapp().web_app.settings["terminal_manager"] assert terminal_mgr_settings.cull_inactive_timeout == CULL_TIMEOUT assert terminal_mgr_settings.cull_interval == CULL_INTERVAL async def test_culling(jp_server_config, jp_fetch, jp_cleanup_subprocesses): # POST request resp = await jp_fetch( "api", "terminals", method="POST", allow_nonstandard_methods=True, ) term = json.loads(resp.body.decode()) term_1 = term["name"] last_activity = term["last_activity"] culled = False for i in range(CULL_TIMEOUT + CULL_INTERVAL): try: resp = await jp_fetch( "api", "terminals", term_1, method="GET", allow_nonstandard_methods=True, ) except HTTPClientError as e: assert e.code == 404 culled = True break else: await asyncio.sleep(1) assert culled await jp_cleanup_subprocesses() jupyter_server-1.13.1/jupyter_server/tests/test_traittypes.py000066400000000000000000000033641415445537200247600ustar00rootroot00000000000000import pytest from traitlets import HasTraits from traitlets import TraitError from traitlets.utils.importstring import import_item from jupyter_server.services.contents.largefilemanager import LargeFileManager from jupyter_server.traittypes import InstanceFromClasses from 
jupyter_server.traittypes import TypeFromClasses class DummyClass: """Dummy class for testing Instance""" class DummyInt(int): """Dummy class for testing types.""" class Thing(HasTraits): a = InstanceFromClasses( default_value=2, klasses=[ int, str, DummyClass, ], ) b = TypeFromClasses( default_value=None, allow_none=True, klasses=[DummyClass, int, "jupyter_server.services.contents.manager.ContentsManager"], ) class TestInstanceFromClasses: @pytest.mark.parametrize("value", [1, "test", DummyClass()]) def test_good_values(self, value): thing = Thing(a=value) assert thing.a == value @pytest.mark.parametrize("value", [2.4, object()]) def test_bad_values(self, value): with pytest.raises(TraitError) as e: thing = Thing(a=value) class TestTypeFromClasses: @pytest.mark.parametrize( "value", [ DummyClass, DummyInt, LargeFileManager, "jupyter_server.services.contents.manager.ContentsManager", ], ) def test_good_values(self, value): thing = Thing(b=value) if isinstance(value, str): value = import_item(value) assert thing.b == value @pytest.mark.parametrize("value", [float, object]) def test_bad_values(self, value): with pytest.raises(TraitError) as e: thing = Thing(b=value) jupyter_server-1.13.1/jupyter_server/tests/test_utils.py000066400000000000000000000035001415445537200237000ustar00rootroot00000000000000from pathlib import Path from unittest.mock import patch import pytest from traitlets.tests.utils import check_help_all_output from jupyter_server.utils import is_namespace_package from jupyter_server.utils import url_escape from jupyter_server.utils import url_unescape def test_help_output(): check_help_all_output("jupyter_server") @pytest.mark.parametrize( "unescaped,escaped", [ ("/this is a test/for spaces/", "/this%20is%20a%20test/for%20spaces/"), ("notebook with space.ipynb", "notebook%20with%20space.ipynb"), ("/path with a/notebook and space.ipynb", "/path%20with%20a/notebook%20and%20space.ipynb"), ( "/ !@$#%^&* / test %^ notebook @#$ name.ipynb", "/%20%21%40%24%23%25%5E%26%2A%20/%20test%20%25%5E%20notebook%20%40%23%24%20name.ipynb", ), ], ) def test_url_escaping(unescaped, escaped): # Test escaping. path = url_escape(unescaped) assert path == escaped # Test unescaping. path = url_unescape(escaped) assert path == unescaped @pytest.mark.parametrize( "name, expected", [ # returns True if it is a namespace package ("test_namespace", True), # returns False if it isn't a namespace package ("sys", False), ("jupyter_server", False), # returns None if it isn't importable ("not_a_python_namespace", None), ], ) def test_is_namespace_package(monkeypatch, name, expected): monkeypatch.syspath_prepend(Path(__file__).parent / "namespace-package-test") assert is_namespace_package(name) is expected def test_is_namespace_package_no_spec(): with patch("importlib.util.find_spec") as mocked_spec: mocked_spec.side_effect = ValueError() assert is_namespace_package("dummy") is None mocked_spec.assert_called_once_with("dummy") jupyter_server-1.13.1/jupyter_server/tests/test_version.py000066400000000000000000000021511415445537200242260ustar00rootroot00000000000000import re import pytest from jupyter_server import __version__ pep440re = re.compile(r"^(\d+)\.(\d+)\.(\d+((a|b|rc)\d+)?)(\.post\d+)?(\.dev\d*)?$") def raise_on_bad_version(version): if not pep440re.match(version): raise ValueError( "Version string does not match the PEP 440 specification, " "which might lead to the sdist and wheel being seen as 2 different releases. " "E.g.: do not use dots for beta/alpha/rc markers."
) # --------- Meta test to test the versioning tests ------------- @pytest.mark.parametrize( "version", [ "4.1.0.b1", "4.1.b1", "4.2", "X.y.z", "1.2.3.dev1.post2", ], ) def test_invalid_pep440_versions(version): with pytest.raises(ValueError): raise_on_bad_version(version) @pytest.mark.parametrize( "version", [ "4.1.1", "4.2.1b3", ], ) def test_valid_pep440_versions(version): assert raise_on_bad_version(version) is None # --------- Test current version -------------- def test_current_version(): raise_on_bad_version(__version__) jupyter_server-1.13.1/jupyter_server/tests/test_view.py000066400000000000000000000032001415445537200235070ustar00rootroot00000000000000"""test view handler""" from html.parser import HTMLParser import pytest import tornado from .utils import expected_http_error from jupyter_server.utils import url_path_join class IFrameSrcFinder(HTMLParser): """Minimal HTML parser to find iframe.src attr""" def __init__(self): super().__init__() self.iframe_src = None def handle_starttag(self, tag, attrs): if tag.lower() == "iframe": for attr, value in attrs: if attr.lower() == "src": self.iframe_src = value return def find_iframe_src(html): """Find the src= attr of an iframe on the page Assumes only one iframe """ finder = IFrameSrcFinder() finder.feed(html) return finder.iframe_src @pytest.mark.parametrize( "exists, name", [ (False, "nosuchfile.html"), (False, "nosuchfile.bin"), (True, "exists.html"), (True, "exists.bin"), ], ) async def test_view(jp_fetch, jp_serverapp, jp_root_dir, exists, name): """Test /view/$path for a few cases""" if exists: jp_root_dir.joinpath(name).write_text(name) if not exists: with pytest.raises(tornado.httpclient.HTTPClientError) as e: await jp_fetch("view", name, method="GET") assert expected_http_error(e, 404), [name, e] else: r = await jp_fetch("view", name, method="GET") assert r.code == 200 assert r.headers["content-type"] == "text/html; charset=UTF-8" html = r.body.decode() src = find_iframe_src(html) assert src == url_path_join(jp_serverapp.base_url, f"/files/{name}") jupyter_server-1.13.1/jupyter_server/tests/unix_sockets/000077500000000000000000000000001415445537200236475ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/tests/unix_sockets/__init__.py000066400000000000000000000000001415445537200257460ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/tests/unix_sockets/conftest.py000066400000000000000000000016411415445537200260500ustar00rootroot00000000000000import os import pathlib import pytest from jupyter_server import DEFAULT_JUPYTER_SERVER_PORT @pytest.fixture def jp_process_id(): """Choose a random unused process ID.""" return os.getpid() @pytest.fixture def jp_unix_socket_file(jp_process_id): """Define a temporary socket connection""" # Rely on `/tmp` to avoid any Linux socket length max buffer # issues. Key on PID for process-wise concurrency. tmp_path = pathlib.Path("/tmp") filename = "jupyter_server.{}.sock".format(jp_process_id) jp_unix_socket_file = tmp_path.joinpath(filename) yield str(jp_unix_socket_file) # Clean up the file after the test runs. if jp_unix_socket_file.exists(): jp_unix_socket_file.unlink() @pytest.fixture def jp_http_port(): """Set the port to the default value, since sock and port cannot both be configured at the same time. """ return DEFAULT_JUPYTER_SERVER_PORT jupyter_server-1.13.1/jupyter_server/tests/unix_sockets/test_api.py000066400000000000000000000043151415445537200260340ustar00rootroot00000000000000import sys import pytest # Skip this module if on Windows. 
Unix sockets are not available on Windows. pytestmark = pytest.mark.skipif( sys.platform.startswith("win"), reason="Unix sockets are not available on Windows." ) import urllib if not sys.platform.startswith("win"): from tornado.netutil import bind_unix_socket import jupyter_server.serverapp from jupyter_server.utils import ( url_path_join, urlencode_unix_socket, async_fetch, ) @pytest.fixture def jp_server_config(jp_unix_socket_file): """Configure the serverapp fixture with the unix socket.""" return {"ServerApp": {"sock": jp_unix_socket_file, "allow_remote_access": True}} @pytest.fixture def http_server_port(jp_unix_socket_file, jp_process_id): """Unix socket and process ID used by tornado's HTTP Server. Overrides the http_server_port fixture from pytest-tornasync and replaces it with a tuple: (unix socket, process id) """ return (bind_unix_socket(jp_unix_socket_file), jp_process_id) @pytest.fixture def jp_unix_socket_fetch(jp_unix_socket_file, jp_auth_header, jp_base_url, http_server, io_loop): """A fetch fixture for Jupyter Server tests that use the unix_serverapp fixture""" async def client(*parts, headers={}, params={}, **kwargs): # Handle URL strings host_url = urlencode_unix_socket(jp_unix_socket_file) path_url = url_path_join(jp_base_url, *parts) params_url = urllib.parse.urlencode(params) url = url_path_join(host_url, path_url + "?" + params_url) r = await async_fetch(url, headers=headers, io_loop=io_loop, **kwargs) return r return client async def test_get_spec(jp_unix_socket_fetch): # Handle URL strings parts = ["api", "spec.yaml"] # Make request and verify it succeeds. response = await jp_unix_socket_fetch(*parts) assert response.code == 200 assert response.body is not None async def test_list_running_servers(jp_unix_socket_file, http_server): """Test that a server running on unix sockets is discovered by the server list""" servers = list(jupyter_server.serverapp.list_running_servers()) assert len(servers) >= 1 assert jp_unix_socket_file in {info["sock"] for info in servers} jupyter_server-1.13.1/jupyter_server/tests/unix_sockets/test_serverapp_integration.py000066400000000000000000000123051415445537200316730ustar00rootroot00000000000000import stat import sys import pytest # Skip this module if on Windows. Unix sockets are not available on Windows. pytestmark = pytest.mark.skipif( sys.platform.startswith("win"), reason="Unix sockets are not available on Windows." ) import os import subprocess import time from jupyter_server.utils import urlencode_unix_socket, urlencode_unix_socket_path @pytest.mark.integration_test def test_shutdown_sock_server_integration(jp_unix_socket_file): url = urlencode_unix_socket(jp_unix_socket_file).encode() encoded_sock_path = urlencode_unix_socket_path(jp_unix_socket_file) p = subprocess.Popen( ["jupyter-server", "--sock=%s" % jp_unix_socket_file, "--sock-mode=0700"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) complete = False for line in iter(p.stderr.readline, b""): if url in line: complete = True break assert complete, "did not find socket URL in stderr when launching the server" socket_path = encoded_sock_path.encode() assert socket_path in subprocess.check_output(["jupyter-server", "list"]) # Ensure umask is properly applied.
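# stat.S_IMODE strips the file-type bits from st_mode, leaving only the
# permission bits to compare against the requested --sock-mode of 0700.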
assert stat.S_IMODE(os.lstat(jp_unix_socket_file).st_mode) == 0o700 try: subprocess.check_output(["jupyter-server", "stop"], stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: assert "There is currently no server running on" in e.output.decode() else: raise AssertionError("expected stop command to fail due to target mis-match") assert encoded_sock_path.encode() in subprocess.check_output(["jupyter-server", "list"]) subprocess.check_output(["jupyter-server", "stop", jp_unix_socket_file]) assert encoded_sock_path.encode() not in subprocess.check_output(["jupyter-server", "list"]) p.wait() @pytest.mark.integration_test def test_sock_server_validate_sockmode_type(): try: subprocess.check_output( ["jupyter-server", "--sock=/tmp/nonexistent", "--sock-mode=badbadbad"], stderr=subprocess.STDOUT, ) except subprocess.CalledProcessError as e: assert "badbadbad" in e.output.decode() else: raise AssertionError("expected execution to fail due to validation of --sock-mode param") @pytest.mark.integration_test def test_sock_server_validate_sockmode_accessible(): try: subprocess.check_output( ["jupyter-server", "--sock=/tmp/nonexistent", "--sock-mode=0444"], stderr=subprocess.STDOUT, ) except subprocess.CalledProcessError as e: assert "0444" in e.output.decode() else: raise AssertionError("expected execution to fail due to validation of --sock-mode param") def _ensure_stopped(check_msg="There are no running servers"): try: subprocess.check_output(["jupyter-server", "stop"], stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: assert check_msg in e.output.decode() else: raise AssertionError("expected all servers to be stopped") @pytest.mark.integration_test def test_stop_multi_integration(jp_unix_socket_file, jp_http_port): """Tests lifecycle behavior for mixed-mode server types w/ default ports. Mostly suitable for local dev testing due to reliance on default port binding. """ TEST_PORT = "9797" MSG_TMPL = "Shutting down server on {}..." _ensure_stopped() # Default port. p1 = subprocess.Popen(["jupyter-server", "--no-browser"]) # Unix socket. p2 = subprocess.Popen(["jupyter-server", "--sock=%s" % jp_unix_socket_file]) # Specified port p3 = subprocess.Popen(["jupyter-server", "--no-browser", "--port=%s" % TEST_PORT]) time.sleep(3) shutdown_msg = MSG_TMPL.format(jp_http_port) assert shutdown_msg in subprocess.check_output(["jupyter-server", "stop"]).decode() _ensure_stopped("There is currently no server running on 8888") assert ( MSG_TMPL.format(jp_unix_socket_file) in subprocess.check_output(["jupyter-server", "stop", jp_unix_socket_file]).decode() ) assert ( MSG_TMPL.format(TEST_PORT) in subprocess.check_output(["jupyter-server", "stop", TEST_PORT]).decode() ) _ensure_stopped() p1.wait() p2.wait() p3.wait() @pytest.mark.integration_test def test_launch_socket_collision(jp_unix_socket_file): """Tests UNIX socket in-use detection for lifecycle correctness.""" sock = jp_unix_socket_file check_msg = "socket %s is already in use" % sock _ensure_stopped() # Start a server. cmd = ["jupyter-server", "--sock=%s" % sock] p1 = subprocess.Popen(cmd) time.sleep(3) # Try to start a server bound to the same UNIX socket. 
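# The second launch should exit with an "already in use" error rather than
# binding the socket twice or hanging; anything else fails the test.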
try: subprocess.check_output(cmd, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as cpe: assert check_msg in cpe.output.decode() except Exception as ex: raise AssertionError(f"expected 'already in use' error, got '{ex}'!") else: raise AssertionError("expected 'already in use' error, got success instead!") # Stop the background server, ensure it's stopped and wait on the process to exit. subprocess.check_call(["jupyter-server", "stop", sock]) _ensure_stopped() p1.wait() jupyter_server-1.13.1/jupyter_server/tests/utils.py000066400000000000000000000021611415445537200226430ustar00rootroot00000000000000import json import tornado some_resource = u"The very model of a modern major general" sample_kernel_json = { "argv": ["cat", "{connection_file}"], "display_name": "Test kernel", } def mkdir(tmp_path, *parts): path = tmp_path.joinpath(*parts) if not path.exists(): path.mkdir(parents=True) return path def expected_http_error(error, expected_code, expected_message=None): """Check that the error matches the expected output error.""" e = error.value if isinstance(e, tornado.web.HTTPError): if expected_code != e.status_code: return False if expected_message is not None and expected_message != str(e): return False return True elif any( [ isinstance(e, tornado.httpclient.HTTPClientError), isinstance(e, tornado.httpclient.HTTPError), ] ): if expected_code != e.code: return False if expected_message: message = json.loads(e.response.body.decode())["message"] if expected_message != message: return False return True jupyter_server-1.13.1/jupyter_server/traittypes.py000066400000000000000000000342151415445537200225560ustar00rootroot00000000000000import inspect from ast import literal_eval from traitlets import ClassBasedTraitType from traitlets import TraitError from traitlets import Undefined # Traitlet's 5.x includes a set of utilities for building # description strings for objects. Traitlets 5.x does not # support Python 3.6, but jupyter_server does; instead # jupyter_server uses traitlets 4.3.x which doesn't have # this `descriptions` submodule. This chunk in the except # clause is a copy-and-paste from traitlets 5.0.5. try: from traitlets.utils.descriptions import describe except ImportError: import re import types def describe(article, value, name=None, verbose=False, capital=False): """Return string that describes a value Parameters ---------- article : str or None A definite or indefinite article. If the article is indefinite (i.e. "a" or "an") the appropriate one will be infered. Thus, the arguments of ``describe`` can themselves represent what the resulting string will actually look like. If None, then no article will be prepended to the result. For non-articled description, values that are instances are treated definitely, while classes are handled indefinitely. value : any The value which will be named. name : str or None (default: None) Only applies when ``article`` is "the" - this ``name`` is a definite reference to the value. By default one will be infered from the value's type and repr methods. verbose : bool (default: False) Whether the name should be concise or verbose. When possible, verbose names include the module, and/or class name where an object was defined. capital : bool (default: False) Whether the first letter of the article should be capitalized or not. By default it is not. 
Examples -------- Indefinite description: >>> describe("a", object()) 'an object' >>> describe("a", object) 'an object' >>> describe("a", type(object)) 'a type' Definite description: >>> describe("the", object()) # doctest: +ELLIPSIS "the object at '0x...'" >>> describe("the", object) 'the object object' >>> describe("the", type(object)) 'the type type' Definitely named description: >>> describe("the", object(), "I made") 'the object I made' >>> describe("the", object, "I will use") 'the object I will use' """ if isinstance(article, str): article = article.lower() if not inspect.isclass(value): typename = type(value).__name__ else: typename = value.__name__ if verbose: typename = _prefix(value) + typename if article == "the" or (article is None and not inspect.isclass(value)): if name is not None: result = "{} {}".format(typename, name) if article is not None: return add_article(result, True, capital) else: return result else: tick_wrap = False if inspect.isclass(value): name = value.__name__ elif isinstance(value, types.FunctionType): name = value.__name__ tick_wrap = True elif isinstance(value, types.MethodType): name = value.__func__.__name__ tick_wrap = True elif type(value).__repr__ in (object.__repr__, type.__repr__): name = "at '%s'" % hex(id(value)) verbose = False else: name = repr(value) verbose = False if verbose: name = _prefix(value) + name if tick_wrap: name = name.join("''") return describe(article, value, name=name, verbose=verbose, capital=capital) elif article in ("a", "an") or article is None: if article is None: return typename return add_article(typename, False, capital) else: raise ValueError( "The 'article' argument should " "be 'the', 'a', 'an', or None not %r" % article ) def add_article(name, definite=False, capital=False): """Returns the string with a prepended article. The input does not need to begin with a charater. Parameters ---------- definite : bool (default: False) Whether the article is definite or not. Indefinite articles being 'a' and 'an', while 'the' is definite. capital : bool (default: False) Whether the added article should have its first letter capitalized or not. """ if definite: result = "the " + name else: first_letters = re.compile(r"[\W_]+").sub("", name) if first_letters[:1].lower() in "aeiou": result = "an " + name else: result = "a " + name if capital: return result[0].upper() + result[1:] else: return result def _prefix(value): if isinstance(value, types.MethodType): name = describe(None, value.__self__, verbose=True) + "." else: module = inspect.getmodule(value) if module is not None and module.__name__ != "builtins": name = module.__name__ + "." else: name = "" return name class TypeFromClasses(ClassBasedTraitType): """A trait whose value must be a subclass of a class in a specified list of classes.""" def __init__(self, default_value=Undefined, klasses=None, **kwargs): """Construct a Type trait A Type trait specifies that its values must be subclasses of a class in a list of possible classes. If only ``default_value`` is given, it is used for the ``klasses`` as well. If neither are given, both default to ``object``. Parameters ---------- default_value : class, str or None The default value must be a subclass of klass. If an str, the str must be a fully specified class name, like 'foo.bar.Bah'. The string is resolved into real class, when the parent :class:`HasTraits` class is instantiated. klasses : list of class, str [ default object ] Values of this trait must be a subclass of klass. 
The klass may be specified in a string like: 'foo.bar.MyClass'. The string is resolved into a real class when the parent :class:`HasTraits` class is instantiated. allow_none : bool [ default False ] Indicates whether None is allowed as an assignable value. """ if default_value is Undefined: new_default_value = object if (klasses is None) else klasses else: new_default_value = default_value if klasses is None: if (default_value is None) or (default_value is Undefined): klasses = [object] else: klasses = [default_value] # OneOfType requires a list of klasses to be specified (different from Type). if not isinstance(klasses, (list, tuple, set)): raise TraitError("`klasses` must be a list of class names (type is str) or classes.") for klass in klasses: if not (inspect.isclass(klass) or isinstance(klass, str)): raise TraitError("A OneOfType trait must specify a list of classes.") # Store classes. self.klasses = klasses super().__init__(new_default_value, **kwargs) def subclass_from_klasses(self, value): "Check whether a given class is a subclass of any class in the klasses list." return any(issubclass(value, klass) for klass in self.importable_klasses) def validate(self, obj, value): """Validates that the value is a valid object instance.""" if isinstance(value, str): try: value = self._resolve_string(value) except ImportError: raise TraitError( "The '%s' trait of %s instance must be a type, but " "%r could not be imported" % (self.name, obj, value) ) try: if self.subclass_from_klasses(value): return value except Exception: pass self.error(obj, value) def info(self): """Returns a description of the trait.""" names = [] for klass in self.klasses: if not isinstance(klass, str): klass = klass.__module__ + "." + klass.__name__ names.append(klass) # Join with " or "; the original str.strip(" or ") could also eat trailing characters from a class name. result = "a subclass of " + " or ".join(names) if self.allow_none: return result + " or None" return result def instance_init(self, obj): self._resolve_classes() super().instance_init(obj) def _resolve_classes(self): # Resolve all string names to actual classes. self.importable_klasses = [] for klass in self.klasses: if isinstance(klass, str): # Try importing the classes to compare. Silently ignore if not importable. try: klass = self._resolve_string(klass) self.importable_klasses.append(klass) except Exception: pass else: self.importable_klasses.append(klass) if isinstance(self.default_value, str): self.default_value = self._resolve_string(self.default_value) def default_value_repr(self): value = self.default_value if isinstance(value, str): return repr(value) else: return repr(f"{value.__module__}.{value.__name__}") class InstanceFromClasses(ClassBasedTraitType): """A trait whose value must be an instance of a class in a specified list of classes. The value can also be an instance of a subclass of the specified classes. Subclasses can declare default classes by overriding the klass attribute. """ def __init__(self, klasses=None, args=None, kw=None, **kwargs): """Construct an Instance trait. This trait allows values that are instances of a particular class or its subclasses. Our implementation is quite different from that of enthought.traits as we don't allow instances to be used for klass and we handle the ``args`` and ``kw`` arguments differently. Parameters ---------- klasses : list of classes or class_names (str) The classes that form the basis for the trait. Class names can also be specified as strings, like 'foo.bar.Bar'. args : tuple Positional arguments for generating the default value.
kw : dict Keyword arguments for generating the default value. allow_none : bool [ default False ] Indicates whether None is allowed as a value. Notes ----- If both ``args`` and ``kw`` are None, then the default value is None. If ``args`` is a tuple and ``kw`` is a dict, then the default is created as ``klass(*args, **kw)``. If exactly one of ``args`` or ``kw`` is None, the None is replaced by ``()`` or ``{}``, respectively. """ # If no classes are given, store None as-is. if klasses is None: self.klasses = klasses # Verify all elements are either classes or strings. elif all(inspect.isclass(k) or isinstance(k, str) for k in klasses): self.klasses = klasses else: raise TraitError( "The klasses attribute must be a list of class names or classes," " not: %r" % klasses ) if (kw is not None) and not isinstance(kw, dict): raise TraitError("The 'kw' argument must be a dict or None.") if (args is not None) and not isinstance(args, tuple): raise TraitError("The 'args' argument must be a tuple or None.") self.default_args = args self.default_kwargs = kw super(InstanceFromClasses, self).__init__(**kwargs) def instance_from_importable_klasses(self, value): "Check whether the given value is an instance of any class in the klasses list." return any(isinstance(value, klass) for klass in self.importable_klasses) def validate(self, obj, value): if self.instance_from_importable_klasses(value): return value else: self.error(obj, value) def info(self): parts = [] for klass in self.klasses: if isinstance(klass, str): parts.append(klass) else: parts.append(describe("a", klass)) # Join with " or "; the original str.strip(" or ") could also eat trailing characters from a class name. result = "an instance of " + " or ".join(parts) if self.allow_none: result += " or None" return result def instance_init(self, obj): self._resolve_classes() super().instance_init(obj) def _resolve_classes(self): # Resolve all string names to actual classes. self.importable_klasses = [] for klass in self.klasses: if isinstance(klass, str): # Try importing the classes to compare. Silently ignore if not importable. try: klass = self._resolve_string(klass) self.importable_klasses.append(klass) except Exception: pass else: self.importable_klasses.append(klass) def make_dynamic_default(self): if (self.default_args is None) and (self.default_kwargs is None): return None # The original referenced a nonexistent ``self.klass`` here; build the default from the first configured class instead. klass = self.importable_klasses[0] if getattr(self, "importable_klasses", None) else self.klasses[0] return klass(*(self.default_args or ()), **(self.default_kwargs or {})) def default_value_repr(self): return repr(self.make_dynamic_default()) def from_string(self, s): return literal_eval(s) jupyter_server-1.13.1/jupyter_server/transutils.py000066400000000000000000000013171415445537200225530ustar00rootroot00000000000000"""Translation related utilities.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import gettext import os import warnings def _trans_gettext_deprecation_helper(*args, **kwargs): warn_msg = "The alias `_()` will be deprecated. Use `_i18n()` instead." warnings.warn(warn_msg, FutureWarning, stacklevel=2) return trans.gettext(*args, **kwargs) # Set up message catalog access base_dir = os.path.realpath(os.path.join(__file__, "..", "..")) trans = gettext.translation( "notebook", localedir=os.path.join(base_dir, "notebook/i18n"), fallback=True ) _ = _trans_gettext_deprecation_helper _i18n = trans.gettext jupyter_server-1.13.1/jupyter_server/utils.py000066400000000000000000000270021415445537200215020ustar00rootroot00000000000000"""Notebook related utilities""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License.
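# --- Illustrative sketch (editorial addition, not part of the original sources) ---
# The ``traittypes`` module above defines ``TypeFromClasses`` and
# ``InstanceFromClasses``. A minimal, hypothetical use on a traitlets
# Configurable; the holder class and trait name are invented for illustration:
#
#     from traitlets.config import Configurable
#     from jupyter_server.traittypes import InstanceFromClasses
#
#     class StoreHolder(Configurable):
#         # Accepts an instance of any listed class; strings resolve lazily.
#         store = InstanceFromClasses(
#             klasses=["collections.OrderedDict", dict],
#             allow_none=True,
#         )
# -----------------------------------------------------------------------------------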
import asyncio import errno import importlib.util import inspect import os import socket import sys from _frozen_importlib_external import _NamespacePath from contextlib import contextmanager from distutils.version import LooseVersion from urllib.parse import quote from urllib.parse import SplitResult from urllib.parse import unquote from urllib.parse import urljoin # pylint: disable=unused-import from urllib.parse import urlparse from urllib.parse import urlsplit from urllib.parse import urlunsplit from urllib.request import pathname2url # pylint: disable=unused-import from tornado.httpclient import AsyncHTTPClient from tornado.httpclient import HTTPClient from tornado.httpclient import HTTPRequest from tornado.netutil import Resolver def url_path_join(*pieces): """Join components of url into a relative url Use to prevent double slash when joining subpath. This will leave the initial and final / in place """ initial = pieces[0].startswith("/") final = pieces[-1].endswith("/") stripped = [s.strip("/") for s in pieces] result = "/".join(s for s in stripped if s) if initial: result = "/" + result if final: result = result + "/" if result == "//": result = "/" return result def url_is_absolute(url): """Determine whether a given URL is absolute""" return urlparse(url).path.startswith("/") def path2url(path): """Convert a local file path to a URL""" pieces = [quote(p) for p in path.split(os.sep)] # preserve trailing / if pieces[-1] == "": pieces[-1] = "/" url = url_path_join(*pieces) return url def url2path(url): """Convert a URL to a local file path""" pieces = [unquote(p) for p in url.split("/")] path = os.path.join(*pieces) return path def url_escape(path): """Escape special characters in a URL path Turns '/foo bar/' into '/foo%20bar/' """ parts = path.split("/") return "/".join([quote(p) for p in parts]) def url_unescape(path): """Unescape special characters in a URL path Turns '/foo%20bar/' into '/foo bar/' """ return "/".join([unquote(p) for p in path.split("/")]) def samefile_simple(path, other_path): """ Fill in for os.path.samefile when it is unavailable (Windows+py2). Do a case-insensitive string comparison in this case plus comparing the full stat result (including times) because Windows + py2 doesn't support the stat fields needed for identifying if it's the same file (st_ino, st_dev). Only to be used if os.path.samefile is not available. Parameters ---------- path : String representing a path to a file other_path : String representing a path to another file Returns ------- same: Boolean that is True if both path and other path are the same """ path_stat = os.stat(path) other_path_stat = os.stat(other_path) return path.lower() == other_path.lower() and path_stat == other_path_stat def to_os_path(path, root=""): """Convert an API path to a filesystem path If given, root will be prepended to the path. root must be a filesystem path already. """ parts = path.strip("/").split("/") parts = [p for p in parts if p != ""] # remove duplicate splits path = os.path.join(root, *parts) return path def to_api_path(os_path, root=""): """Convert a filesystem path to an API path If given, root will be removed from the path. root must be a filesystem path already. 
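Examples
--------
An illustrative doctest (editorial addition; it relies only on the string
handling in the body below, and builds the input with ``os.path.join`` so the
result is the same on POSIX and Windows):

>>> to_api_path(os.path.join("root", "foo", "bar.txt"), root="root")
'foo/bar.txt'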
""" if os_path.startswith(root): os_path = os_path[len(root) :] parts = os_path.strip(os.path.sep).split(os.path.sep) parts = [p for p in parts if p != ""] # remove duplicate splits path = "/".join(parts) return path def check_version(v, check): """check version string v >= check If dev/prerelease tags result in TypeError for string-number comparison, it is assumed that the dependency is satisfied. Users on dev branches are responsible for keeping their own packages up to date. """ try: return LooseVersion(v) >= LooseVersion(check) except TypeError: return True # Copy of IPython.utils.process.check_pid: def _check_pid_win32(pid): import ctypes # OpenProcess returns 0 if no such process (of ours) exists # positive int otherwise return bool(ctypes.windll.kernel32.OpenProcess(1, 0, pid)) def _check_pid_posix(pid): """Copy of IPython.utils.process.check_pid""" try: os.kill(pid, 0) except OSError as err: if err.errno == errno.ESRCH: return False elif err.errno == errno.EPERM: # Don't have permission to signal the process - probably means it exists return True raise else: return True if sys.platform == "win32": check_pid = _check_pid_win32 else: check_pid = _check_pid_posix async def ensure_async(obj): """Convert a non-awaitable object to a coroutine if needed, and await it if it was not already awaited. """ if inspect.isawaitable(obj): try: result = await obj except RuntimeError as e: if str(e) == "cannot reuse already awaited coroutine": # obj is already the coroutine's result return obj raise return result # obj doesn't need to be awaited return obj def run_sync(maybe_async): """If async, runs maybe_async and blocks until it has executed, possibly creating an event loop. If not async, just returns maybe_async as it is the result of something that has already executed. Parameters ---------- maybe_async : async or non-async object The object to be executed, if it is async. Returns ------- result Whatever the async object returns, or the object itself. """ if not inspect.isawaitable(maybe_async): # that was not something async, just return it return maybe_async # it is async, we need to run it in an event loop def wrapped(): create_new_event_loop = False try: loop = asyncio.get_event_loop() except RuntimeError: create_new_event_loop = True else: if loop.is_closed(): create_new_event_loop = True if create_new_event_loop: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) try: result = loop.run_until_complete(maybe_async) except RuntimeError as e: if str(e) == "This event loop is already running": # just return a Future, hoping that it will be awaited result = asyncio.ensure_future(maybe_async) else: raise e return result return wrapped() async def run_sync_in_loop(maybe_async): """Runs a function synchronously whether it is an async function or not. If async, runs maybe_async and blocks until it has executed. If not async, just returns maybe_async as it is the result of something that has already executed. Parameters ---------- maybe_async : async or non-async object The object to be executed, if it is async. Returns ------- result Whatever the async object returns, or the object itself. 
""" if not inspect.isawaitable(maybe_async): return maybe_async return await maybe_async def urlencode_unix_socket_path(socket_path): """Encodes a UNIX socket path string from a socket path for the `http+unix` URI form.""" return socket_path.replace("/", "%2F") def urldecode_unix_socket_path(socket_path): """Decodes a UNIX sock path string from an encoded sock path for the `http+unix` URI form.""" return socket_path.replace("%2F", "/") def urlencode_unix_socket(socket_path): """Encodes a UNIX socket URL from a socket path for the `http+unix` URI form.""" return "http+unix://%s" % urlencode_unix_socket_path(socket_path) def unix_socket_in_use(socket_path): """Checks whether a UNIX socket path on disk is in use by attempting to connect to it.""" if not os.path.exists(socket_path): return False try: sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) sock.connect(socket_path) except socket.error: return False else: return True finally: sock.close() @contextmanager def _request_for_tornado_client(urlstring, method="GET", body=None, headers=None): """A utility that provides a context that handles HTTP, HTTPS, and HTTP+UNIX request. Creates a tornado HTTPRequest object with a URL that tornado's HTTPClients can accept. If the request is made to a unix socket, temporarily configure the AsyncHTTPClient to resolve the URL and connect to the proper socket. """ parts = urlsplit(urlstring) if parts.scheme in ["http", "https"]: pass elif parts.scheme == "http+unix": # If unix socket, mimic HTTP. parts = SplitResult( scheme="http", netloc=parts.netloc, path=parts.path, query=parts.query, fragment=parts.fragment, ) class UnixSocketResolver(Resolver): """A resolver that routes HTTP requests to unix sockets in tornado HTTP clients. Due to constraints in Tornados' API, the scheme of the must be `http` (not `http+unix`). Applications should replace the scheme in URLS before making a request to the HTTP client. """ def initialize(self, resolver): self.resolver = resolver def close(self): self.resolver.close() async def resolve(self, host, port, *args, **kwargs): return [(socket.AF_UNIX, urldecode_unix_socket_path(host))] resolver = UnixSocketResolver(resolver=Resolver()) AsyncHTTPClient.configure(None, resolver=resolver) else: raise Exception("Unknown URL scheme.") # Yield the request for the given client. url = urlunsplit(parts) request = HTTPRequest(url, method=method, body=body, headers=headers) yield request def fetch(urlstring, method="GET", body=None, headers=None): """ Send a HTTP, HTTPS, or HTTP+UNIX request to a Tornado Web Server. Returns a tornado HTTPResponse. """ with _request_for_tornado_client(urlstring) as request: response = HTTPClient(AsyncHTTPClient).fetch(request) return response async def async_fetch(urlstring, method="GET", body=None, headers=None, io_loop=None): """ Send an asynchronous HTTP, HTTPS, or HTTP+UNIX request to a Tornado Web Server. Returns a tornado HTTPResponse. """ with _request_for_tornado_client(urlstring) as request: response = await AsyncHTTPClient(io_loop).fetch(request) return response def is_namespace_package(namespace): """Is the provided namespace a Python Namespace Package (PEP420). https://www.python.org/dev/peps/pep-0420/#specification Returns `None` if module is not importable. """ # NOTE: using submodule_search_locations because the loader can be None try: spec = importlib.util.find_spec(namespace) except ValueError: # spec is not set - see https://docs.python.org/3/library/importlib.html#importlib.util.find_spec return None if not spec: # e.g. 
module not installed return None return isinstance(spec.submodule_search_locations, _NamespacePath) jupyter_server-1.13.1/jupyter_server/view/000077500000000000000000000000001415445537200207415ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/view/__init__.py000066400000000000000000000000001415445537200230400ustar00rootroot00000000000000jupyter_server-1.13.1/jupyter_server/view/handlers.py000066400000000000000000000017121415445537200231140ustar00rootroot00000000000000# encoding: utf-8 """Tornado handlers for viewing HTML files.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. from tornado import web from ..base.handlers import JupyterHandler from ..base.handlers import path_regex from ..utils import ensure_async from ..utils import url_escape from ..utils import url_path_join class ViewHandler(JupyterHandler): """Render HTML files within an iframe.""" @web.authenticated async def get(self, path): path = path.strip("/") if not await ensure_async(self.contents_manager.file_exists(path)): raise web.HTTPError(404, u"File does not exist: %s" % path) basename = path.rsplit("/", 1)[-1] file_url = url_path_join(self.base_url, "files", url_escape(path)) self.write(self.render_template("view.html", file_url=file_url, page_title=basename)) default_handlers = [ (r"/view%s" % path_regex, ViewHandler), ] jupyter_server-1.13.1/package-lock.json000066400000000000000000000307531415445537200201230ustar00rootroot00000000000000{ "name": "jupyter_server", "version": "1.0.0", "lockfileVersion": 1, "requires": true, "dependencies": { "ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" }, "ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "requires": { "color-convert": "^2.0.1" } }, "balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, "bootstrap": { "version": "3.4.1", "resolved": "https://registry.npmjs.org/bootstrap/-/bootstrap-3.4.1.tgz", "integrity": "sha512-yN5oZVmRCwe5aKwzRj6736nSmKDX7pLYwsXiCj/EYmo16hODaBiT4En5btW/jhBF/seV+XMx3aYwukYC3A49DA==" }, "brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "requires": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "cliui": { "version": "7.0.4", "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", "requires": { "string-width": "^4.2.0", "strip-ansi": "^6.0.0", "wrap-ansi": "^7.0.0" } }, "color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "requires": { "color-name": "~1.1.4" } }, "color-name": { "version": "1.1.4", "resolved": 
"https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" }, "copyfiles": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/copyfiles/-/copyfiles-2.4.1.tgz", "integrity": "sha512-fereAvAvxDrQDOXybk3Qu3dPbOoKoysFMWtkY3mv5BsL8//OSZVL5DCLYqgRfY5cWirgRzlC+WSrxp6Bo3eNZg==", "requires": { "glob": "^7.0.5", "minimatch": "^3.0.3", "mkdirp": "^1.0.4", "noms": "0.0.0", "through2": "^2.0.1", "untildify": "^4.0.0", "yargs": "^16.1.0" } }, "core-util-is": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" }, "emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "escalade": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==" }, "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" }, "get-caller-file": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==" }, "glob": { "version": "7.1.7", "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", "requires": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", "minimatch": "^3.0.4", "once": "^1.3.0", "path-is-absolute": "^1.0.0" } }, "inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", "requires": { "once": "^1.3.0", "wrappy": "1" } }, "inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, "is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" }, "isarray": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" }, "minimatch": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", "requires": { "brace-expansion": "^1.1.7" } }, "mkdirp": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==" }, "noms": { "version": "0.0.0", "resolved": "https://registry.npmjs.org/noms/-/noms-0.0.0.tgz", "integrity": 
"sha1-2o69nzr51nYJGbJ9nNyAkqczKFk=", "requires": { "inherits": "^2.0.1", "readable-stream": "~1.0.31" } }, "once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", "requires": { "wrappy": "1" } }, "path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" }, "process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" }, "readable-stream": { "version": "1.0.34", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=", "requires": { "core-util-is": "~1.0.0", "inherits": "~2.0.1", "isarray": "0.0.1", "string_decoder": "~0.10.x" } }, "require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=" }, "safe-buffer": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, "string-width": { "version": "4.2.2", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==", "requires": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.0" } }, "string_decoder": { "version": "0.10.31", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" }, "strip-ansi": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", "requires": { "ansi-regex": "^5.0.0" } }, "through2": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", "requires": { "readable-stream": "~2.3.6", "xtend": "~4.0.1" }, "dependencies": { "isarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" }, "readable-stream": { "version": "2.3.7", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", "requires": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "string_decoder": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", "requires": { "safe-buffer": "~5.1.0" } } } }, "untildify": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", "integrity": 
"sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==" }, "util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" }, "wrap-ansi": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", "requires": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" }, "xtend": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" }, "y18n": { "version": "5.0.8", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==" }, "yargs": { "version": "16.2.0", "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", "requires": { "cliui": "^7.0.2", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.0", "y18n": "^5.0.5", "yargs-parser": "^20.2.2" } }, "yargs-parser": { "version": "20.2.9", "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==" } } } jupyter_server-1.13.1/package.json000066400000000000000000000004361415445537200171700ustar00rootroot00000000000000{ "name": "jupyter_server", "private": true, "version": "1.0.0", "license": "BSD", "scripts": { "build": "copyfiles -f node_modules/bootstrap/dist/css/*.min.* jupyter_server/static/style" }, "dependencies": { "bootstrap": "^3.4.0", "copyfiles": "^2.4.1" } } jupyter_server-1.13.1/pyproject.toml000066400000000000000000000016211415445537200176130ustar00rootroot00000000000000[build-system] requires = ["jupyter_packaging~=0.9,<2"] build-backend = "jupyter_packaging.build_api" [tool.jupyter-packaging.builder] factory = "jupyter_packaging.npm_builder" [tool.check-manifest] ignore = ["tbump.toml", ".*", "*.yml", "package-lock.json", "bootstrap*", "conftest.py"] [tool.pytest.ini_options] addopts = "--doctest-modules" testpaths = [ "jupyter_server/" ] [tool.jupyter-releaser] skip = ["check-links"] [tool.tbump.version] current = "1.13.1" regex = ''' (?P\d+)\.(?P\d+)\.(?P\d+) ((?Pa|b|rc|.dev)(?P\d+))? ''' [tool.tbump.git] message_template = "Bump to {new_version}" tag_template = "v{new_version}" [[tool.tbump.file]] src = "jupyter_server/_version.py" version_template = '({major}, {minor}, {patch}, "{channel}", "{release}")' [[tool.tbump.field]] name = "channel" default = "" [[tool.tbump.field]] name = "release" default = "" jupyter_server-1.13.1/readthedocs.yml000066400000000000000000000003151415445537200177060ustar00rootroot00000000000000version: 2 sphinx: configuration: docs/source/conf.py conda: environment: docs/environment.yml python: version: 3.8 install: # install itself with pip install . - method: pip path: . 
jupyter_server-1.13.1/setup.cfg000066400000000000000000000034361415445537200165260ustar00rootroot00000000000000[metadata] name = jupyter_server version = attr: jupyter_server.__version__ description = The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications. long_description = file: README.md long_description_content_type = text/markdown license_file = COPYING.md author = Jupyter Development Team author_email = jupyter@googlegroups.com url = https://jupyter.org platforms = Linux, Mac OS X, Windows keywords = ipython, jupyter classifiers = Intended Audience :: Developers Intended Audience :: System Administrators Intended Audience :: Science/Research License :: OSI Approved :: BSD License Programming Language :: Python Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 [options] zip_safe = False include_package_data = True packages = find: python_requires = >=3.6 install_requires = jinja2 tornado>=6.1.0 pyzmq>=17 argon2-cffi ipython_genutils traitlets>=4.2.1 jupyter_core>=4.6.0 jupyter_client>=6.1.1 nbformat nbconvert Send2Trash terminado>=0.8.3 prometheus_client anyio>=3.1.0,<4 websocket-client [options.extras_require] test = coverage pytest>=6.0 pytest-cov pytest-mock requests pytest-tornasync pytest-console-scripts ipykernel # NOTE: we cannot auto install examples/simple here because of: # https://github.com/pypa/pip/issues/6658 [options.entry_points] console_scripts = jupyter-server = jupyter_server.serverapp:main [options.packages.find] exclude = ['docs*', 'examples*'] [flake8] ignore = E, C, W, F401, F403, F811, F841, E402, I100, I101, D400 builtins = c, get_config exclude = .cache, .github, docs, setup.py jupyter_server-1.13.1/setup.py000066400000000000000000000005061415445537200164120ustar00rootroot00000000000000from setuptools import setup try: from jupyter_packaging import wrap_installers, npm_builder ensured_targets = ["jupyter_server/static/style/bootstrap.min.css"] cmdclass = wrap_installers(pre_develop=npm_builder(), ensured_targets=ensured_targets) except ImportError: cmdclass = {} setup(cmdclass=cmdclass)
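# Editorial sketch (an assumption about the build wiring, not part of the
# original file): with the cmdclass above, an editable install such as
#
#     pip install -e .
#
# triggers jupyter_packaging's npm_builder before develop-mode installation
# (roughly "npm install" followed by "npm run build") and then checks that
# jupyter_server/static/style/bootstrap.min.css was produced.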