jupyter_core-5.7.2/.git-blame-ignore-revs
# Initial pre-commit reformat
d6a8168b9f6b8a28bba5f7cca3d6a9c31da041b6

jupyter_core-5.7.2/.github/dependabot.yml
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
    groups:
      actions:
        patterns:
          - "*"
  - package-ecosystem: "pip"
    directory: "/"
    schedule:
      interval: "weekly"
    groups:
      actions:
        patterns:
          - "*"

jupyter_core-5.7.2/.github/workflows/codeql-analysis.yml
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
# ******** NOTE ********
name: "CodeQL"

on:
  push:
    branches: [master]
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [master]
  schedule:
    # Make a pass every Saturday at 06:41 UTC
    - cron: "41 6 * * 6"

permissions:
  security-events: write

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        language: ["python"]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
        # Learn more...
        # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.
          # queries: ./path/to/local/query, your-org/your-repo/queries@main
          queries: security-and-quality

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@v3

      # ℹ️ Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines # and modify them (or add more) to build your code if your project # uses a compiled language #- run: | # make bootstrap # make release - name: Perform CodeQL Analysis uses: github/codeql-action/analyze@v3 jupyter_core-5.7.2/.github/workflows/downstream.yml000066400000000000000000000052671457404620400225620ustar00rootroot00000000000000name: Test downstream projects on: push: branches: ["main"] pull_request: concurrency: group: downstream-${{ github.ref }} cancel-in-progress: true jobs: ipykernel: runs-on: ubuntu-latest timeout-minutes: 15 steps: - uses: actions/checkout@v4 - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 with: package_name: ipykernel nbclient: runs-on: ubuntu-latest timeout-minutes: 15 steps: - uses: actions/checkout@v4 - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 with: package_name: nbclient env_values: IPYKERNEL_CELL_NAME=\ nbconvert: runs-on: ubuntu-latest timeout-minutes: 15 steps: - uses: actions/checkout@v4 - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 with: package_name: nbconvert package_spec: pip install -e ".[test]" jupyter_server: runs-on: ubuntu-latest timeout-minutes: 15 steps: - uses: actions/checkout@v4 - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 with: package_name: jupyter_server jupyter_client: runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v4 - name: Base Setup uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - name: Run Test uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 with: package_name: jupyter_client pytest_jupyter: runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v4 - name: Base Setup uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - name: Run Test uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 with: package_name: pytest_jupyter package_spec: pip install -e ".[test,client,server]" downstreams_check: # This job does nothing and is only used for the branch protection if: always() needs: - ipykernel - nbclient - nbconvert - jupyter_server - jupyter_client - pytest_jupyter runs-on: ubuntu-latest steps: - name: Decide whether the needed jobs succeeded or failed uses: re-actors/alls-green@release/v1 with: jobs: ${{ toJSON(needs) }} jupyter_core-5.7.2/.github/workflows/enforce-label.yml000066400000000000000000000005001457404620400230560ustar00rootroot00000000000000name: Enforce PR label on: pull_request: types: [labeled, unlabeled, opened, edited, synchronize] jobs: enforce-label: runs-on: ubuntu-latest permissions: pull-requests: write steps: - name: enforce-triage-label uses: jupyterlab/maintainer-tools/.github/actions/enforce-label@v1 jupyter_core-5.7.2/.github/workflows/prep-release.yml000066400000000000000000000032311457404620400227500ustar00rootroot00000000000000name: "Step 1: Prep Release" on: workflow_dispatch: inputs: version_spec: description: "New Version Specifier" default: "next" required: false branch: description: "The branch to target" required: false post_version_spec: description: "Post Version Specifier" required: false silent: description: "Set a 
placeholder in the changelog and don't publish the release." required: false type: boolean since: description: "Use PRs with activity since this date or git reference" required: false since_last_stable: description: "Use PRs with activity since the last stable git tag" required: false type: boolean jobs: prep_release: runs-on: ubuntu-latest permissions: contents: write steps: - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - name: Prep Release id: prep-release uses: jupyter-server/jupyter_releaser/.github/actions/prep-release@v2 with: token: ${{ secrets.GITHUB_TOKEN }} version_spec: ${{ github.event.inputs.version_spec }} silent: ${{ github.event.inputs.silent }} post_version_spec: ${{ github.event.inputs.post_version_spec }} target: ${{ github.event.inputs.target }} branch: ${{ github.event.inputs.branch }} since: ${{ github.event.inputs.since }} since_last_stable: ${{ github.event.inputs.since_last_stable }} - name: "** Next Step **" run: | echo "Optional): Review Draft Release: ${{ steps.prep-release.outputs.release_url }}" jupyter_core-5.7.2/.github/workflows/publish-changelog.yml000066400000000000000000000016401457404620400237610ustar00rootroot00000000000000name: "Publish Changelog" on: release: types: [published] workflow_dispatch: inputs: branch: description: "The branch to target" required: false jobs: publish_changelog: runs-on: ubuntu-latest environment: release steps: - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - uses: actions/create-github-app-token@v1 id: app-token with: app-id: ${{ vars.APP_ID }} private-key: ${{ secrets.APP_PRIVATE_KEY }} - name: Publish changelog id: publish-changelog uses: jupyter-server/jupyter_releaser/.github/actions/publish-changelog@v2 with: token: ${{ steps.app-token.outputs.token }} branch: ${{ github.event.inputs.branch }} - name: "** Next Step **" run: | echo "Merge the changelog update PR: ${{ steps.publish-changelog.outputs.pr_url }}" jupyter_core-5.7.2/.github/workflows/publish-release.yml000066400000000000000000000034061457404620400234540ustar00rootroot00000000000000name: "Step 2: Publish Release" on: workflow_dispatch: inputs: branch: description: "The target branch" required: false release_url: description: "The URL of the draft GitHub release" required: false steps_to_skip: description: "Comma separated list of steps to skip" required: false jobs: publish_release: runs-on: ubuntu-latest environment: release permissions: id-token: write steps: - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - uses: actions/create-github-app-token@v1 id: app-token with: app-id: ${{ vars.APP_ID }} private-key: ${{ secrets.APP_PRIVATE_KEY }} - name: Populate Release id: populate-release uses: jupyter-server/jupyter_releaser/.github/actions/populate-release@v2 with: token: ${{ steps.app-token.outputs.token }} branch: ${{ github.event.inputs.branch }} release_url: ${{ github.event.inputs.release_url }} steps_to_skip: ${{ github.event.inputs.steps_to_skip }} - name: Finalize Release id: finalize-release uses: jupyter-server/jupyter_releaser/.github/actions/finalize-release@v2 with: token: ${{ steps.app-token.outputs.token }} release_url: ${{ steps.populate-release.outputs.release_url }} - name: "** Next Step **" if: ${{ success() }} run: | echo "Verify the final release" echo ${{ steps.finalize-release.outputs.release_url }} - name: "** Failure Message **" if: ${{ failure() }} run: | echo "Failed to Publish the Draft Release Url:" echo ${{ steps.populate-release.outputs.release_url }} 
jupyter_core-5.7.2/.github/workflows/test.yml000066400000000000000000000116141457404620400213470ustar00rootroot00000000000000# This workflow will install Python dependencies, run tests and lint with a variety of Python versions # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions name: Python package on: push: branches: ["main"] pull_request: schedule: - cron: "0 8 * * *" concurrency: group: >- ${{ github.workflow }}- ${{ github.ref_type }}- ${{ github.event.pull_request.number || github.sha }} cancel-in-progress: true defaults: run: shell: bash -eux {0} jobs: build: runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: os: [ubuntu-latest, windows-latest, macos-latest] python-version: ["3.8", "3.12"] include: - os: windows-latest python-version: "3.9" - os: ubuntu-latest python-version: "3.11" - os: ubuntu-latest python-version: "pypy-3.9" - os: macos-latest python-version: "3.10" steps: - uses: actions/checkout@v4 - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - name: Test run: | hatch run cov:test - name: Check CLI run: | pip install . cd $HOME jupyter troubleshoot - uses: jupyterlab/maintainer-tools/.github/actions/upload-coverage@v1 coverage: runs-on: ubuntu-latest if: always() needs: - build steps: - uses: actions/checkout@v4 - uses: jupyterlab/maintainer-tools/.github/actions/report-coverage@v1 test_minimum_versions: name: Test Minimum Versions timeout-minutes: 20 runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 with: dependency_type: minimum - name: Run the unit tests run: | hatch run test:nowarn || hatch run test:nowarn --lf test_prereleases: name: Test Prereleases runs-on: ubuntu-latest timeout-minutes: 20 steps: - uses: actions/checkout@v4 - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 with: dependency_type: pre - name: Run the tests run: | hatch run test:nowarn || hatch run test:nowarn --lf test_docs: name: Test Docs runs-on: windows-latest steps: - uses: actions/checkout@v4 - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - name: Build API docs run: | hatch run docs:api # If this fails run `hatch run docs:api` locally # and commit. git status --porcelain git status -s | grep "A" && exit 1 git status -s | grep "M" && exit 1 echo "API docs done" - run: hatch run docs:build test_lint: name: Test Lint runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - name: Run Linters run: | hatch run typing:test hatch run lint:build pipx run interrogate -v . pipx run doc8 --max-line-length=200 make_sdist: name: Make SDist runs-on: ubuntu-latest timeout-minutes: 10 steps: - uses: actions/checkout@v4 - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - uses: jupyterlab/maintainer-tools/.github/actions/make-sdist@v1 test_sdist: runs-on: ubuntu-latest needs: [make_sdist] name: Install from SDist and Test timeout-minutes: 20 steps: - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - uses: jupyterlab/maintainer-tools/.github/actions/test-sdist@v1 check_release: runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v4 - name: Base Setup uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - name: Install Dependencies run: | pip install -e . 
- name: Check Release uses: jupyter-server/jupyter_releaser/.github/actions/check-release@v2 with: token: ${{ secrets.GITHUB_TOKEN }} check_links: name: Check Links runs-on: ubuntu-latest timeout-minutes: 15 steps: - uses: actions/checkout@v4 - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 - uses: jupyterlab/maintainer-tools/.github/actions/check-links@v1 tests_check: # This job does nothing and is only used for the branch protection if: always() needs: - coverage - test_lint - test_docs - test_minimum_versions - test_prereleases - check_links - check_release - test_sdist runs-on: ubuntu-latest steps: - name: Decide whether the needed jobs succeeded or failed uses: re-actors/alls-green@release/v1 with: jobs: ${{ toJSON(needs) }} jupyter_core-5.7.2/.gitignore000066400000000000000000000002561457404620400162400ustar00rootroot00000000000000MANIFEST build dist _build docs/gh-pages *.py[co] __pycache__ *.egg-info *~ *.bak .ipynb_checkpoints .tox .DS_Store \#*# .#* .coverage htmlcov .cache .idea docs/changelog.md jupyter_core-5.7.2/.pre-commit-config.yaml000066400000000000000000000042631457404620400205330ustar00rootroot00000000000000ci: autoupdate_schedule: monthly autoupdate_commit_msg: "chore: update pre-commit hooks" repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.5.0 hooks: - id: check-case-conflict - id: check-ast - id: check-docstring-first - id: check-executables-have-shebangs - id: check-added-large-files - id: check-case-conflict - id: check-merge-conflict - id: check-json - id: check-toml - id: check-yaml - id: debug-statements - id: end-of-file-fixer - id: trailing-whitespace - repo: https://github.com/python-jsonschema/check-jsonschema rev: 0.27.4 hooks: - id: check-github-workflows - repo: https://github.com/executablebooks/mdformat rev: 0.7.17 hooks: - id: mdformat additional_dependencies: [mdformat-gfm, mdformat-frontmatter, mdformat-footnote] - repo: https://github.com/pre-commit/mirrors-prettier rev: "v4.0.0-alpha.8" hooks: - id: prettier types_or: [yaml, html, json] - repo: https://github.com/adamchainz/blacken-docs rev: "1.16.0" hooks: - id: blacken-docs additional_dependencies: [black==23.7.0] - repo: https://github.com/pre-commit/mirrors-mypy rev: "v1.8.0" hooks: - id: mypy files: jupyter_core stages: [manual] args: ["--install-types", "--non-interactive"] additional_dependencies: ["traitlets>=5.13", "platformdirs>=3.11"] - repo: https://github.com/codespell-project/codespell rev: "v2.2.6" hooks: - id: codespell args: ["-L", "re-use"] - repo: https://github.com/pre-commit/pygrep-hooks rev: "v1.10.0" hooks: - id: rst-backticks - id: rst-directive-colons - id: rst-inline-touching-normal - repo: https://github.com/astral-sh/ruff-pre-commit rev: v0.2.0 hooks: - id: ruff types_or: [python, jupyter] args: ["--fix", "--show-fixes"] - id: ruff-format types_or: [python, jupyter] - repo: https://github.com/scientific-python/cookie rev: "2024.01.24" hooks: - id: sp-repo-review additional_dependencies: ["repo-review[cli]"] jupyter_core-5.7.2/.readthedocs.yaml000066400000000000000000000003051457404620400174720ustar00rootroot00000000000000version: 2 python: install: # install itself with pip install . - method: pip path: . 
extra_requirements: - docs build: os: ubuntu-22.04 tools: python: "3.11" jupyter_core-5.7.2/CHANGELOG.md000066400000000000000000001114101457404620400160540ustar00rootroot00000000000000# Changes in jupyter-core ## 5.7.2 ([Full Changelog](https://github.com/jupyter/jupyter_core/compare/v5.7.1...1264a81fc834f18db2b41e136ec4ac9d1a4ad993)) ### Maintenance and upkeep improvements - Update Release Scripts [#396](https://github.com/jupyter/jupyter_core/pull/396) ([@blink1073](https://github.com/blink1073)) - Enforce pytest 7 [#393](https://github.com/jupyter/jupyter_core/pull/393) ([@blink1073](https://github.com/blink1073)) - chore: update pre-commit hooks [#392](https://github.com/jupyter/jupyter_core/pull/392) ([@pre-commit-ci](https://github.com/pre-commit-ci)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/jupyter/jupyter_core/graphs/contributors?from=2024-01-08&to=2024-03-12&type=c)) [@blink1073](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ablink1073+updated%3A2024-01-08..2024-03-12&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Apre-commit-ci+updated%3A2024-01-08..2024-03-12&type=Issues) ## 5.7.1 ([Full Changelog](https://github.com/jupyter/jupyter_core/compare/v5.7.0...8e69e7f1b856099c057e9711d5a439b33ef859d3)) ### Bugs fixed - Derive JupyterAsyncApp from JupyterApp [#389](https://github.com/jupyter/jupyter_core/pull/389) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/jupyter/jupyter_core/graphs/contributors?from=2024-01-03&to=2024-01-08&type=c)) [@blink1073](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ablink1073+updated%3A2024-01-03..2024-01-08&type=Issues) ## 5.7.0 ([Full Changelog](https://github.com/jupyter/jupyter_core/compare/v5.6.1...19912f49cfb9e2645e08fd1bd599b95206387d2d)) ### Enhancements made - Modernize event loop behavior [#387](https://github.com/jupyter/jupyter_core/pull/387) ([@blink1073](https://github.com/blink1073)) ### Maintenance and upkeep improvements - chore: update pre-commit hooks [#388](https://github.com/jupyter/jupyter_core/pull/388) ([@pre-commit-ci](https://github.com/pre-commit-ci)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/jupyter/jupyter_core/graphs/contributors?from=2024-01-01&to=2024-01-03&type=c)) [@blink1073](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ablink1073+updated%3A2024-01-01..2024-01-03&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Apre-commit-ci+updated%3A2024-01-01..2024-01-03&type=Issues) ## 5.6.1 ([Full Changelog](https://github.com/jupyter/jupyter_core/compare/v5.6.0...f81186b2f31fe79949fe5bcd5ea0da6b0794a8fb)) ### Bugs fixed - Fix export of package version [#386](https://github.com/jupyter/jupyter_core/pull/386) ([@blink1073](https://github.com/blink1073)) - Revert "Enable async JupyterApp" [#385](https://github.com/jupyter/jupyter_core/pull/385) ([@blink1073](https://github.com/blink1073)) ### Maintenance and upkeep improvements - Bump github/codeql-action from 2 to 3 [#380](https://github.com/jupyter/jupyter_core/pull/380) ([@dependabot](https://github.com/dependabot)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/jupyter/jupyter_core/graphs/contributors?from=2023-12-26&to=2024-01-01&type=c)) 
[@blink1073](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ablink1073+updated%3A2023-12-26..2024-01-01&type=Issues) | [@dependabot](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Adependabot+updated%3A2023-12-26..2024-01-01&type=Issues) ## 5.6.0 ([Full Changelog](https://github.com/jupyter/jupyter_core/compare/v5.5.1...e33fb748b435de7d587d9c2bfdcbb5f3ee73b4bd)) ### Enhancements made - Enable async JupyterApp [#381](https://github.com/jupyter/jupyter_core/pull/381) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/jupyter/jupyter_core/graphs/contributors?from=2023-12-18&to=2023-12-26&type=c)) [@blink1073](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ablink1073+updated%3A2023-12-18..2023-12-26&type=Issues) ## 5.5.1 ([Full Changelog](https://github.com/jupyter/jupyter_core/compare/v5.5.0...4e387a4e95b1064cca4a97dd7324b29d14b6e3b7)) ### Bugs fixed - Account for read only file systems in migration [#379](https://github.com/jupyter/jupyter_core/pull/379) ([@blink1073](https://github.com/blink1073)) ### Maintenance and upkeep improvements - Update ruff and typings [#376](https://github.com/jupyter/jupyter_core/pull/376) ([@blink1073](https://github.com/blink1073)) - chore: update pre-commit hooks [#374](https://github.com/jupyter/jupyter_core/pull/374) ([@pre-commit-ci](https://github.com/pre-commit-ci)) - Clean up lint and add downstream checks [#373](https://github.com/jupyter/jupyter_core/pull/373) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/jupyter/jupyter_core/graphs/contributors?from=2023-10-30&to=2023-12-18&type=c)) [@blink1073](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ablink1073+updated%3A2023-10-30..2023-12-18&type=Issues) | [@mtelka](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Amtelka+updated%3A2023-10-30..2023-12-18&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Apre-commit-ci+updated%3A2023-10-30..2023-12-18&type=Issues) ## 5.5.0 ([Full Changelog](https://github.com/jupyter/jupyter_core/compare/v5.4.0...df0e9e709ebe208df799ec08030b7d7eb15c9cf9)) ### Maintenance and upkeep improvements - Fix typing for traitlets 5.13 [#372](https://github.com/jupyter/jupyter_core/pull/372) ([@blink1073](https://github.com/blink1073)) - Adopt pydata sphinx theme [#371](https://github.com/jupyter/jupyter_core/pull/371) ([@blink1073](https://github.com/blink1073)) - Use ruff format [#370](https://github.com/jupyter/jupyter_core/pull/370) ([@blink1073](https://github.com/blink1073)) - Update typings for mypy 1.6 [#368](https://github.com/jupyter/jupyter_core/pull/368) ([@blink1073](https://github.com/blink1073)) - Lowercase APPNAME on macOS Homebrew [#364](https://github.com/jupyter/jupyter_core/pull/364) ([@singingwolfboy](https://github.com/singingwolfboy)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/jupyter/jupyter_core/graphs/contributors?from=2023-10-10&to=2023-10-30&type=c)) [@blink1073](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ablink1073+updated%3A2023-10-10..2023-10-30&type=Issues) | [@singingwolfboy](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Asingingwolfboy+updated%3A2023-10-10..2023-10-30&type=Issues) ## 5.4.0 ([Full 
Changelog](https://github.com/jupyter/jupyter_core/compare/v5.3.2...d6a748262a44334f27008e8c90d2557f46481f5b)) ### Maintenance and upkeep improvements - Add python 3.12 support [#367](https://github.com/jupyter/jupyter_core/pull/367) ([@blink1073](https://github.com/blink1073)) - Update typings for traitlets 5.11 [#366](https://github.com/jupyter/jupyter_core/pull/366) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/jupyter/jupyter_core/graphs/contributors?from=2023-09-27&to=2023-10-10&type=c)) [@blink1073](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ablink1073+updated%3A2023-09-27..2023-10-10&type=Issues) | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Adavidbrochart+updated%3A2023-09-27..2023-10-10&type=Issues) | [@jamescooke](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ajamescooke+updated%3A2023-09-27..2023-10-10&type=Issues) ## 5.3.2 ([Full Changelog](https://github.com/jupyter/jupyter_core/compare/v5.3.1...0cf041784533c56da138a0947b6db29999790247)) ### Bugs fixed - Fix event loop handling [#362](https://github.com/jupyter/jupyter_core/pull/362) ([@blink1073](https://github.com/blink1073)) ### Maintenance and upkeep improvements - Bump actions/checkout from 3 to 4 [#361](https://github.com/jupyter/jupyter_core/pull/361) ([@dependabot](https://github.com/dependabot)) - Adopt sp-repo-review [#360](https://github.com/jupyter/jupyter_core/pull/360) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/jupyter/jupyter_core/graphs/contributors?from=2023-06-14&to=2023-09-27&type=c)) [@blink1073](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ablink1073+updated%3A2023-06-14..2023-09-27&type=Issues) | [@dependabot](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Adependabot+updated%3A2023-06-14..2023-09-27&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Apre-commit-ci+updated%3A2023-06-14..2023-09-27&type=Issues) ## 5.3.1 ([Full Changelog](https://github.com/jupyter/jupyter_core/compare/v5.3.0...c64421919f6627f82c8899018bba0836760331f4)) ### Bugs fixed - Better handling of config migration [#356](https://github.com/jupyter/jupyter_core/pull/356) ([@smartass101](https://github.com/smartass101)) ### Maintenance and upkeep improvements - Fix write_executable test [#351](https://github.com/jupyter/jupyter_core/pull/351) ([@blink1073](https://github.com/blink1073)) - Use local coverage [#349](https://github.com/jupyter/jupyter_core/pull/349) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/jupyter/jupyter_core/graphs/contributors?from=2023-03-16&to=2023-06-14&type=c)) [@blink1073](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ablink1073+updated%3A2023-03-16..2023-06-14&type=Issues) | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Adavidbrochart+updated%3A2023-03-16..2023-06-14&type=Issues) | [@jamescooke](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ajamescooke+updated%3A2023-03-16..2023-06-14&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Apre-commit-ci+updated%3A2023-03-16..2023-06-14&type=Issues) | 
[@smartass101](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Asmartass101+updated%3A2023-03-16..2023-06-14&type=Issues) ## 5.3.0 ([Full Changelog](https://github.com/jupyter/jupyter_core/compare/v5.2.0...6cfe9fc042a0f016a0d4545bc6790b8277d80c24)) ### Bugs fixed - Fix pywin32 version constraint [#347](https://github.com/jupyter/jupyter_core/pull/347) ([@blink1073](https://github.com/blink1073)) ### Maintenance and upkeep improvements - Add license [#344](https://github.com/jupyter/jupyter_core/pull/344) ([@dcsaba89](https://github.com/dcsaba89)) - Support platformdirs 3 [#342](https://github.com/jupyter/jupyter_core/pull/342) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/jupyter/jupyter_core/graphs/contributors?from=2023-01-30&to=2023-03-16&type=c)) [@blink1073](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ablink1073+updated%3A2023-01-30..2023-03-16&type=Issues) | [@dcsaba89](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Adcsaba89+updated%3A2023-01-30..2023-03-16&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Apre-commit-ci+updated%3A2023-01-30..2023-03-16&type=Issues) ## 5.2.0 ([Full Changelog](https://github.com/jupyter/jupyter_core/compare/v5.1.5...98b9a1a94e79d1137246b4c1f8c16343b72b050c)) ### Enhancements made - Set up shell command-line tab-completion for jupyter and subcommands [#337](https://github.com/jupyter/jupyter_core/pull/337) ([@azjps](https://github.com/azjps)) ### Maintenance and upkeep improvements - Add more linting [#338](https://github.com/jupyter/jupyter_core/pull/338) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/jupyter/jupyter_core/graphs/contributors?from=2023-01-24&to=2023-01-30&type=c)) [@azjps](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Aazjps+updated%3A2023-01-24..2023-01-30&type=Issues) | [@blink1073](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ablink1073+updated%3A2023-01-24..2023-01-30&type=Issues) ## 5.1.5 ([Full Changelog](https://github.com/jupyter/jupyter_core/compare/v5.1.4...269449fe4dcb8d427b54337d83bcb67bf50e87da)) ### Maintenance and upkeep improvements - MAINT: Don't format logs in log call. 
[#336](https://github.com/jupyter/jupyter_core/pull/336) ([@Carreau](https://github.com/Carreau)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/jupyter/jupyter_core/graphs/contributors?from=2023-01-23&to=2023-01-24&type=c)) [@Carreau](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3ACarreau+updated%3A2023-01-23..2023-01-24&type=Issues) ## 5.1.4 ([Full Changelog](https://github.com/jupyter/jupyter_core/compare/v5.1.3...c268e9133b37f710360a102f3a5d47a84e8cadc8)) ### Bugs fixed - Suppress any exception in \_do_i_own shortcut [#335](https://github.com/jupyter/jupyter_core/pull/335) ([@minrk](https://github.com/minrk)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/jupyter/jupyter_core/graphs/contributors?from=2023-01-09&to=2023-01-23&type=c)) [@minrk](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Aminrk+updated%3A2023-01-09..2023-01-23&type=Issues) ## 5.1.3 ([Full Changelog](https://github.com/jupyter/jupyter_core/compare/v5.1.2...eb65690cb69a793edeb21b520c2358332933bb5d)) ### Bugs fixed - Clear the test environment before each function run [#333](https://github.com/jupyter/jupyter_core/pull/333) ([@jasongrout](https://github.com/jasongrout)) ### Maintenance and upkeep improvements - Add check-wheel-contents ignore [#332](https://github.com/jupyter/jupyter_core/pull/332) ([@blink1073](https://github.com/blink1073)) - Remove only absolute import. [#331](https://github.com/jupyter/jupyter_core/pull/331) ([@Carreau](https://github.com/Carreau)) - MAINT: Coro is not coro-function. [#330](https://github.com/jupyter/jupyter_core/pull/330) ([@Carreau](https://github.com/Carreau)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/jupyter/jupyter_core/graphs/contributors?from=2023-01-02&to=2023-01-09&type=c)) [@blink1073](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ablink1073+updated%3A2023-01-02..2023-01-09&type=Issues) | [@Carreau](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3ACarreau+updated%3A2023-01-02..2023-01-09&type=Issues) | [@jasongrout](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ajasongrout+updated%3A2023-01-02..2023-01-09&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Apre-commit-ci+updated%3A2023-01-02..2023-01-09&type=Issues) ## 5.1.2 ([Full Changelog](https://github.com/jupyter/jupyter_core/compare/v5.1.1...4bd8ba97c99fefc416fe43411f935d40e2b7e441)) ### Maintenance and upkeep improvements - Add spell check and ensure docstrings [#327](https://github.com/jupyter/jupyter_core/pull/327) ([@blink1073](https://github.com/blink1073)) - Maint: Some more precise typing. 
[#326](https://github.com/jupyter/jupyter_core/pull/326) ([@Carreau](https://github.com/Carreau)) ### Documentation improvements - Add api docs [#328](https://github.com/jupyter/jupyter_core/pull/328) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/jupyter/jupyter_core/graphs/contributors?from=2022-12-22&to=2023-01-02&type=c)) [@blink1073](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ablink1073+updated%3A2022-12-22..2023-01-02&type=Issues) | [@Carreau](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3ACarreau+updated%3A2022-12-22..2023-01-02&type=Issues) ## 5.1.1 ([Full Changelog](https://github.com/jupyter/jupyter_core/compare/v5.1.0...1ed25e389116fbb98c513ee2148f38f9548e6198)) ### Enhancements made - Only prefer envs owned by the current user [#323](https://github.com/jupyter/jupyter_core/pull/323) ([@minrk](https://github.com/minrk)) ### Bugs fixed - Don't treat the conda root env as an env [#324](https://github.com/jupyter/jupyter_core/pull/324) ([@minrk](https://github.com/minrk)) ### Maintenance and upkeep improvements - Fix lint [#325](https://github.com/jupyter/jupyter_core/pull/325) ([@blink1073](https://github.com/blink1073)) - Adopt ruff and address lint [#321](https://github.com/jupyter/jupyter_core/pull/321) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/jupyter/jupyter_core/graphs/contributors?from=2022-11-28&to=2022-12-22&type=c)) [@blink1073](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ablink1073+updated%3A2022-11-28..2022-12-22&type=Issues) | [@jasongrout](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ajasongrout+updated%3A2022-11-28..2022-12-22&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Aminrk+updated%3A2022-11-28..2022-12-22&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Apre-commit-ci+updated%3A2022-11-28..2022-12-22&type=Issues) ## 5.1.0 ([Full Changelog](https://github.com/jupyter/jupyter_core/compare/v5.0.0...9a976bb7d4f2d7092b2ee98b05a30eb1ff0be425)) ### Enhancements made - Add run_sync and ensure_async functions [#315](https://github.com/jupyter/jupyter_core/pull/315) ([@davidbrochart](https://github.com/davidbrochart)) ### Maintenance and upkeep improvements - Add more path tests [#316](https://github.com/jupyter/jupyter_core/pull/316) ([@blink1073](https://github.com/blink1073)) - Clean up workflows and add badges [#314](https://github.com/jupyter/jupyter_core/pull/314) ([@blink1073](https://github.com/blink1073)) - CI Cleanup [#312](https://github.com/jupyter/jupyter_core/pull/312) ([@blink1073](https://github.com/blink1073)) ### Documentation improvements - Clean up workflows and add badges [#314](https://github.com/jupyter/jupyter_core/pull/314) ([@blink1073](https://github.com/blink1073)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/jupyter/jupyter_core/graphs/contributors?from=2022-11-09&to=2022-11-28&type=c)) [@blink1073](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ablink1073+updated%3A2022-11-09..2022-11-28&type=Issues) | [@davidbrochart](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Adavidbrochart+updated%3A2022-11-09..2022-11-28&type=Issues) | 
[@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Apre-commit-ci+updated%3A2022-11-09..2022-11-28&type=Issues) ## 5.0.0 ([Full Changelog](https://github.com/jupyter/jupyter_core/compare/4.9.2...fdbb55b59575a3eb6aeb502998a835b013401412)) ### Major Changes #### Prefer Environment Level Configuration We now make the assumption that if we are running in a virtual environment, we should prioritize the environment-level `sys.prefix` over the user-level paths. Users can opt out of this behavior by setting `JUPYTER_PREFER_ENV_PATH`, which takes precedence over our autodetection. #### Migrate to Standard Platform Directories In version 5, we introduce a `JUPYTER_PLATFORM_DIRS` environment variable to opt in to using more appropriate platform-specific directories. We raise a deprecation warning if the variable is not set. In version 6, `JUPYTER_PLATFORM_DIRS` will be opt-out. In version 7, we will remove the environment variable checks and old directory logic. #### Drop Support for Python 3.7 We are dropping support for Python 3.7 ahead of its official end of life, to reduce maintenance burden as we add support for Python 3.11. ### Enhancements made - Use platformdirs for path locations [#292](https://github.com/jupyter/jupyter_core/pull/292) ([@blink1073](https://github.com/blink1073)) - Try to detect if we are in a virtual environment and change path precedence accordingly [#286](https://github.com/jupyter/jupyter_core/pull/286) ([@jasongrout](https://github.com/jasongrout)) ### Bugs fixed - Add current working directory as first config path [#291](https://github.com/jupyter/jupyter_core/pull/291) ([@blink1073](https://github.com/blink1073)) - Fix inclusion of jupyter file and check in CI [#276](https://github.com/jupyter/jupyter_core/pull/276) ([@blink1073](https://github.com/blink1073)) ### Maintenance and upkeep improvements - Bump github/codeql-action from 1 to 2 [#308](https://github.com/jupyter/jupyter_core/pull/308) ([@dependabot](https://github.com/dependabot)) - Bump actions/checkout from 2 to 3 [#307](https://github.com/jupyter/jupyter_core/pull/307) ([@dependabot](https://github.com/dependabot)) - Add dependabot [#306](https://github.com/jupyter/jupyter_core/pull/306) ([@blink1073](https://github.com/blink1073)) - Adopt jupyter releaser [#305](https://github.com/jupyter/jupyter_core/pull/305) ([@blink1073](https://github.com/blink1073)) - Add more typing [#304](https://github.com/jupyter/jupyter_core/pull/304) ([@blink1073](https://github.com/blink1073)) - Require Python 3.8+ [#302](https://github.com/jupyter/jupyter_core/pull/302) ([@blink1073](https://github.com/blink1073)) - Use hatch backend [#265](https://github.com/jupyter/jupyter_core/pull/265) ([@blink1073](https://github.com/blink1073)) - Switch to flit build backend [#262](https://github.com/jupyter/jupyter_core/pull/262) ([@blink1073](https://github.com/blink1073)) - is_hidden: Use normalized paths [#271](https://github.com/jupyter/jupyter_core/pull/271) ([@martinRenou](https://github.com/martinRenou)) - ### Documentation - Update broken link to `Contributing` guide [#289](https://github.com/jupyter/jupyter_core/pull/289) ([@jamesr66a](https://github.com/jamesr66a)) ### Contributors to this release ([GitHub contributors page for this release](https://github.com/jupyter/jupyter_core/graphs/contributors?from=2022-02-15&to=2022-11-09&type=c)) [@blink1073](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ablink1073+updated%3A2022-02-15..2022-11-09&type=Issues) | 
[@bollwyvl](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Abollwyvl+updated%3A2022-02-15..2022-11-09&type=Issues) | [@dependabot](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Adependabot+updated%3A2022-02-15..2022-11-09&type=Issues) | [@dlqqq](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Adlqqq+updated%3A2022-02-15..2022-11-09&type=Issues) | [@gaborbernat](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Agaborbernat+updated%3A2022-02-15..2022-11-09&type=Issues) | [@gutow](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Agutow+updated%3A2022-02-15..2022-11-09&type=Issues) | [@jamesr66a](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ajamesr66a+updated%3A2022-02-15..2022-11-09&type=Issues) | [@jaraco](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ajaraco+updated%3A2022-02-15..2022-11-09&type=Issues) | [@jasongrout](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ajasongrout+updated%3A2022-02-15..2022-11-09&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Akevin-bates+updated%3A2022-02-15..2022-11-09&type=Issues) | [@maartenbreddels](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Amaartenbreddels+updated%3A2022-02-15..2022-11-09&type=Issues) | [@martinRenou](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3AmartinRenou+updated%3A2022-02-15..2022-11-09&type=Issues) | [@meeseeksmachine](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Ameeseeksmachine+updated%3A2022-02-15..2022-11-09&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter%2Fjupyter_core+involves%3Apre-commit-ci+updated%3A2022-02-15..2022-11-09&type=Issues) ## 5.0.0rc0 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/5.0.0rc0) - Try to detect if we are in a virtual environment and change path precedence accordingly. ([#286](https://github.com/jupyter/jupyter_core/pull/286)) - Update broken link to Contributing guide. ([#289](https://github.com/jupyter/jupyter_core/pull/289)) - Add current working directory as first config path. ([#291](https://github.com/jupyter/jupyter_core/pull/291)) - Use platformdirs for path locations. ([#292](https://github.com/jupyter/jupyter_core/pull/292)) ## 4.11 ### 4.11.1 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.11.1) - Fix inclusion of jupyter file and check in CI. ([#276](https://github.com/jupyter/jupyter_core/pull/276)) ### 4.11.0 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.11.0) - Use hatch build backend. ([#265](https://github.com/jupyter/jupyter_core/pull/265)) - `is_hidden`: Use normalized paths. ([#271](https://github.com/jupyter/jupyter_core/pull/271)) ## 4.10 ### 4.10.0 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.10.0) - Include all files from `jupyter_core`. ([#253](https://github.com/jupyter/jupyter_core/pull/253)) - Add project URLs to `setup.cfg`. ([#254](https://github.com/jupyter/jupyter_core/pull/254)) - Set up pre-commit. ([#255](https://github.com/jupyter/jupyter_core/pull/255)) - Add flake8 and mypy settings. ([#256](https://github.com/jupyter/jupyter_core/pull/256)) - Clean up CI. ([#258](https://github.com/jupyter/jupyter_core/pull/258)) ## 4.9 ### 4.9.2 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.9.1) - Set proper `sys.argv[0]` for subcommand. 
([#248](https://github.com/jupyter/jupyter_core/pull/248)) - Add explicit encoding in open calls. ([#249](https://github.com/jupyter/jupyter_core/pull/249)) - `jupyter_config_dir` - reorder `home_dir` initialization. ([#251](https://github.com/jupyter/jupyter_core/pull/251)) ### 4.9.1 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.9.0) - Add a workaround for virtualenv for getting the user site directory. ([#247](https://github.com/jupyter/jupyter_core/pull/247)) ### 4.9.0 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.9.0) See the [jupyter_core 4.9](https://github.com/jupyter/jupyter_core/milestone/21?closed=1) milestone on GitHub for the full list of pull requests and issues closed. - Add Python site user base subdirectories to config and data user-level paths if `site.ENABLE_USER_SITE` is True. One way to disable these directory additions is to set the `PYTHONNOUSERSITE` environment variable. These locations can be customized by setting the `PYTHONUSERBASE` environment variable. ([#242](https://github.com/jupyter/jupyter_core/pull/242)) ## 4.8 ### 4.8.2 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.8.2) jupyter_core 4.8.1 was released the same day as 4.8.0 and also included the fix below for the Windows tests. Unfortunately, the 4.8.1 release commit and tag were not pushed to GitHub. We are releasing 4.8.2 so we have a commit and tag in version control. - Fix windows test regression ([#240](https://github.com/jupyter/jupyter_core/pull/240)) ### 4.8.0 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.8.0) See the [jupyter_core 4.8](https://github.com/jupyter/jupyter_core/milestone/20?closed=1) milestone on GitHub for the full list of pull requests and issues closed. jupyter-core now has experimental support for PyPy (Python 3.7). Some features are known not to work due to limitations in PyPy, such as hidden file detection on Windows. - Print an error message instead of an exception when a command is not found ([#218](https://github.com/jupyter/jupyter_core/pull/218)) - Return canonical path when using `%APPDATA%` on Windows ([#222](https://github.com/jupyter/jupyter_core/pull/222)) - Print full usage on missing or invalid commands ([#225](https://github.com/jupyter/jupyter_core/pull/225)) - Remove dependency on `pywin32` package on PyPy ([#230](https://github.com/jupyter/jupyter_core/pull/230)) - Update packages listed in `jupyter --version` ([#232](https://github.com/jupyter/jupyter_core/pull/232)) - Inherit base aliases/flags from traitlets Application, including `--show-config` from traitlets 5 ([#233](https://github.com/jupyter/jupyter_core/pull/233)) - Trigger warning when trying to check hidden file status on PyPy ([#238](https://github.com/jupyter/jupyter_core/pull/238)) ## 4.7 ### 4.7.1 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.7.1) - Allow creating user to delete secure file ([#213](https://github.com/jupyter/jupyter_core/pull/213)) ### 4.7.0 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.7.0) See the [jupyter_core 4.7](https://github.com/jupyter/jupyter_core/milestone/19?closed=1) milestone on GitHub for the full list of pull requests and issues closed. - Add a new `JUPYTER_PREFER_ENV_PATH` variable, which can be set to switch the order of the environment-level path and the user-level path in the Jupyter path hierarchy (e.g., `jupyter --paths`). 
It is considered set if it is a value that is not one of 'no', 'n', 'off', 'false', '0', or '0.0' (case insensitive). If you are running Jupyter in multiple virtual environments as the same user, you will likely want to set this environment variable. - Drop Python 2.x and 3.5 support, as they have reached end of life. - Add Python 3.9 builds to testing, and expand testing to cover Windows, macOS, and Linux platforms. - `jupyter --paths --debug` now explains the environment variables that affect the current path list. - Update the file hidden check on Windows to use new Python features rather than ctypes directly. - Add conda environment information in `jupyter troubleshoot`. - Update `_version.version_info` and `_version.__version__` to follow Python conventions. ## 4.6 ### 4.6.3 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.6.3) - Changed windows secure_write path to skip all filesystem permission checks when running in insecure mode. Too many exception paths existed for mounted file systems to reliably try to set them before opting out with the insecure write pattern. ### 4.6.2 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.6.2) - Add ability to allow insecure writes with JUPYTER_ALLOW_INSECURE_WRITES environment variable ([#182](https://github.com/jupyter/jupyter_core/pull/182)). - Docs typo and build fixes - Added python 3.7 and 3.8 builds to testing ### 4.6.1 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.6.1) - Tolerate execute bit in owner permissions when validating secure writes ([#173](https://github.com/jupyter/jupyter_core/pull/173)). - Fix project name typo in copyright ([#171](https://github.com/jupyter/jupyter_core/pull/171)). ### 4.6.0 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.6.0) - Unicode characters existing in the user's home directory name are properly handled ([#131](https://github.com/jupyter/jupyter_core/pull/131)). - `mock` is now only required for testing on Python 2 ([#157](https://github.com/jupyter/jupyter_core/pull/157)). - Deprecation warnings relative to `_runtime_dir_changed` are no longer produced ([#158](https://github.com/jupyter/jupyter_core/pull/158)). - The `scripts` directory relative to the current python environment is now appended to the search directory for subcommands ([#162](https://github.com/jupyter/jupyter_core/pull/162)). - Some utility functions (`exists()`, `is_hidden()`, `secure_write()`) have been moved from `jupyter_client` and `jupyter_server` to `jupyter_core` ([#163](https://github.com/jupyter/jupyter_core/pull/163)). - Fix error on Windows when setting private permissions ([#166](https://github.com/jupyter/jupyter_core/pull/166)). ## 4.5 ### 4.5.0 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.5.0) - `jupyter --version` now tries to show the version number of various other installed Jupyter packages, not just `jupyter_core` ([#136](https://github.com/jupyter/jupyter_core/pull/136)). This will hopefully make it clearer that there are various packages with their own version numbers. - Allow a `JUPYTER_CONFIG_PATH` environment variable to specify a search path of additional locations for config ([#139](https://github.com/jupyter/jupyter_core/pull/139)). - `jupyter subcommand` no longer modifies the `PATH` environment variable when it runs `jupyter-subcommand` ([#148](https://github.com/jupyter/jupyter_core/pull/148)). 
- Jupyter's 'runtime' directory no longer uses `XDG_RUNTIME_DIR` ([#143](https://github.com/jupyter/jupyter_core/pull/143)). While it has some attractive properties, it has led to various problems; see the pull request for details. - Fix `JupyterApp` to respect the `raise_config_file_errors` traitlet ([#149](https://github.com/jupyter/jupyter_core/pull/149)). - Various improvements to the bash completion scripts in this repository ([#125](https://github.com/jupyter/jupyter_core/pull/125), [#126](https://github.com/jupyter/jupyter_core/pull/126)). - The `setup.py` script now always uses setuptools, like most other Jupyter projects ([#147](https://github.com/jupyter/jupyter_core/pull/147)). - The LICENSE file is included in wheels ([#133](https://github.com/jupyter/jupyter_core/pull/133)). ## 4.4 ### 4.4.0 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.4.0) - `jupyter troubleshoot` gets the list of packages from the Python environment it's in, by using `sys.executable` to call `pip list` ([#104](https://github.com/jupyter/jupyter_core/pull/104)). - Added utility function `ensure_dir_exists`, and switched to using it over the one from ipython_genutils, which does permissions wrong ([#113](https://github.com/jupyter/jupyter_core/pull/113)). - Avoid creating the `~/.ipython` directory when checking if it exists for config migration ([#118](https://github.com/jupyter/jupyter_core/pull/118)). - Fix mistaken description in zsh completions ([#98](https://github.com/jupyter/jupyter_core/pull/98)). - Fix subcommand tests on Windows ([#103](https://github.com/jupyter/jupyter_core/pull/103)). - The README now describes how to work on `jupyter_core` and build the docs ([#110](https://github.com/jupyter/jupyter_core/pull/110)). - Fix a broken link to a release in the docs ([#109](https://github.com/jupyter/jupyter_core/pull/109)). ## 4.3 ### 4.3.0 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.3.0) - Add `JUPYTER_NO_CONFIG` environment variable for disabling all Jupyter configuration. - More detailed error message when failing to launch subcommands. ## 4.2 ### 4.2.1 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.2.1) - Fix error message on Windows when subcommand not found. - Correctly display PATH in `jupyter troubleshoot` on Windows. ### 4.2.0 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.2.0) - Make `jupyter` directory top priority in search path for subcommands, so that `jupyter-subcommand` next to `jupyter` will always be picked if present. - Avoid using `shell=True` for subcommand dispatch on Windows. ## 4.1 ### 4.1.1 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.1.1) - Include symlink directory and real location on subcommand PATH when `jupyter` is a symlink. ### 4.1.0 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.1.0) - Add `jupyter.py` module, so that `python -m jupyter` always works. - Add prototype `jupyter troubleshoot` command for displaying environment info. - Ensure directory containing `jupyter` executable is included when dispatching subcommands. - Unicode fixes for Legacy Python. 
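
The path-related environment variables described in the 4.3–4.7 entries above (`JUPYTER_CONFIG_PATH`, `JUPYTER_NO_CONFIG`, `JUPYTER_PREFER_ENV_PATH`) all act on the search lists exposed by `jupyter_core.paths`. The following minimal sketch is illustrative only and is not shipped with any release; the directory `/opt/site-config` is a made-up example value.

```python
# Illustrative sketch (not part of any release); "/opt/site-config" is a
# made-up example directory.
import os

from jupyter_core.paths import jupyter_config_path, jupyter_path

# JUPYTER_CONFIG_PATH (4.5.0) adds extra locations to the config search path.
os.environ["JUPYTER_CONFIG_PATH"] = "/opt/site-config"

# JUPYTER_PREFER_ENV_PATH (4.7.0) reorders environment-level vs. user-level
# entries; JUPYTER_NO_CONFIG (4.3.0) disables all Jupyter configuration.
print(jupyter_config_path())  # config directories, highest priority first
print(jupyter_path("kernels"))  # data directories searched for kernelspecs
```
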
## 4.0 ### 4.0.6 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.0.6) - fix typo preventing migration when custom.css is missing ### 4.0.5 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.0.5) - fix subcommands on Windows (yes, again) - fix migration when custom.js/css are not present ### 4.0.4 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.0.4) - fix subcommands on Windows (again) - ensure `jupyter --version` outputs to stdout ### 4.0.3 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.0.3) - setuptools fixes needed to run on Windows ### 4.0.2 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.0.2) - fixes for jupyter-migrate ### 4.0.1 [on GitHub](https://github.com/jupyter/jupyter_core/releases/tag/4.0.1) This is the first release of the jupyter-core package. jupyter_core-5.7.2/CONTRIBUTING.md000066400000000000000000000004031457404620400164730ustar00rootroot00000000000000# Contributing We follow the [Jupyter Contributing Guide](https://docs.jupyter.org/en/latest/contributing/content-contributor.html). See the [README](https://github.com/jupyter/jupyter_core/blob/master/README.md) on how to set up a development environment. jupyter_core-5.7.2/LICENSE000066400000000000000000000030001457404620400152430ustar00rootroot00000000000000BSD 3-Clause License - Copyright (c) 2015-, Jupyter Development Team All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. jupyter_core-5.7.2/README.md000066400000000000000000000105601457404620400155260ustar00rootroot00000000000000# Jupyter Core [![Build Status](https://github.com/jupyter/jupyter_core/actions/workflows/test.yml/badge.svg?query=branch%3Amain++)](https://github.com/jupyter/jupyter_core/actions/workflows/test.yml/badge.svg?query=branch%3Amain++) [![Documentation Status](https://readthedocs.org/projects/jupyter-core/badge/?version=latest)](http://jupyter-core.readthedocs.io/en/latest/?badge=latest) Core common functionality of Jupyter projects. This package contains base application classes and configuration inherited by other projects. It doesn't do much on its own. 
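
For orientation, here is a minimal sketch (not included in this package) of how a downstream project might build on the base classes and path helpers provided here; the application name `myextension` and the `greeting` trait are hypothetical.

```python
# Minimal sketch of a downstream app built on jupyter_core (hypothetical
# name "myextension" and trait "greeting"); JupyterApp supplies config-file
# loading and the standard Jupyter directory conventions.
from traitlets import Unicode

from jupyter_core.application import JupyterApp
from jupyter_core.paths import jupyter_config_path, jupyter_data_dir


class MyExtensionApp(JupyterApp):
    """Example application that inherits Jupyter config and path handling."""

    name = "myextension"  # config is read from myextension_config.py/.json
    description = "Demo app built on jupyter_core.application.JupyterApp"

    greeting = Unicode("hello", help="A configurable greeting.").tag(config=True)

    def start(self) -> None:
        self.log.info("greeting: %s", self.greeting)
        self.log.info("config search path: %s", jupyter_config_path())
        self.log.info("data dir: %s", jupyter_data_dir())


if __name__ == "__main__":
    MyExtensionApp.launch_instance()
```

Run as a script, it accepts traitlets-style options such as `--MyExtensionApp.greeting=hi`, the same command-line configuration mechanism used by other Jupyter applications.
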
# Development Setup The [Jupyter Contributor Guides](https://docs.jupyter.org/en/latest/contributing/content-contributor.html) provide extensive information on contributing code or documentation to Jupyter projects. The limited instructions below for setting up a development environment are for your convenience. ## Coding You'll need Python and `pip` on the search path. Clone the Jupyter Core git repository to your computer, for example in `/my/projects/jupyter_core`. Now create an [editable install](https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs) and download the dependencies of code and test suite by executing: ``` cd /my/projects/jupyter_core/ pip install -e ".[test]" py.test ``` The last command runs the test suite to verify the setup. During development, you can pass filenames to `py.test`, and it will execute only those tests. ## Code Styling `jupyter_core` has adopted automatic code formatting so you shouldn't need to worry too much about your code style. As long as your code is valid, the pre-commit hook should take care of how it should look. `pre-commit` and its associated hooks will automatically be installed when you run `pip install -e ".[test]"` To install `pre-commit` manually, run the following: ```bash pip install pre-commit pre-commit install ``` You can invoke the pre-commit hook by hand at any time with: ```bash pre-commit run ``` which should run any autoformatting on your code and tell you about any errors it couldn't fix automatically. You may also install [black integration](https://github.com/psf/black#editor-integration) into your text editor to format code automatically. If you have already committed files before setting up the pre-commit hook with `pre-commit install`, you can fix everything up using `pre-commit run --all-files`. You need to make the fixing commit yourself after that. ## Documentation The documentation of Jupyter Core is generated from the files in `docs/` using Sphinx. Instructions for setting up Sphinx with a selection of optional modules are in the [Documentation Guide](https://docs.jupyter.org/en/latest/contributing/content-contributor.html). You'll also need the `make` command. For a minimal Sphinx installation to process the Jupyter Core docs, execute: ``` pip install sphinx ``` The following commands build the documentation in HTML format and check for broken links: ``` cd /my/projects/jupyter_core/docs/ make html linkcheck ``` Point your browser to the following URL to access the generated documentation: _file:///my/projects/jupyter_core/docs/\_build/html/index.html_ ## About the Jupyter Development Team The Jupyter Development Team is the set of all contributors to the Jupyter project. This includes all of the Jupyter subprojects. A full list with details is kept in the documentation directory, in the file `about/credits.txt`. The core team that coordinates development on GitHub can be found here: https://github.com/ipython/. ## Our Copyright Policy Jupyter uses a shared copyright model. Each contributor maintains copyright over their contributions to Jupyter. It is important to note that these contributions are typically only changes to the repositories. Thus, the Jupyter source code in its entirety is not the copyright of any single person or institution. Instead, it is the collective copyright of the entire Jupyter Development Team. 
If individual contributors want to maintain a record of what changes/contributions they have specific copyright on, they should indicate their copyright in the commit message of the change, when they commit the change to one of the Jupyter repositories. With this in mind, the following banner should be used in any source code file to indicate the copyright and license terms: ``` # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. ``` jupyter_core-5.7.2/RELEASE.md000066400000000000000000000002671457404620400156540ustar00rootroot00000000000000# Making a Release The recommended way to make a release is to use [`jupyter_releaser`](https://jupyter-releaser.readthedocs.io/en/latest/get_started/making_release_from_repo.html). jupyter_core-5.7.2/docs/000077500000000000000000000000001457404620400151755ustar00rootroot00000000000000jupyter_core-5.7.2/docs/Makefile000066400000000000000000000164111457404620400166400ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " applehelp to make an Apple Help Book" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" @echo " coverage to run coverage check of the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/jupyter_core.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/jupyter_core.qhc" applehelp: $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp @echo @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." @echo "N.B. You won't be able to view it unless you put it in" \ "~/Library/Documentation/Help or install it in your application" \ "bundle." devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/jupyter_core" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/jupyter_core" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 
latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." coverage: $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage @echo "Testing of coverage in the sources finished, look at the " \ "results in $(BUILDDIR)/coverage/python.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." jupyter_core-5.7.2/docs/api/000077500000000000000000000000001457404620400157465ustar00rootroot00000000000000jupyter_core-5.7.2/docs/api/jupyter_core.rst000066400000000000000000000014731457404620400212170ustar00rootroot00000000000000jupyter\_core package ===================== Subpackages ----------- .. toctree:: :maxdepth: 4 jupyter_core.utils Submodules ---------- .. automodule:: jupyter_core.application :members: :undoc-members: :show-inheritance: .. automodule:: jupyter_core.command :members: :undoc-members: :show-inheritance: .. automodule:: jupyter_core.migrate :members: :undoc-members: :show-inheritance: .. automodule:: jupyter_core.paths :members: :undoc-members: :show-inheritance: .. automodule:: jupyter_core.troubleshoot :members: :undoc-members: :show-inheritance: .. automodule:: jupyter_core.version :members: :undoc-members: :show-inheritance: Module contents --------------- .. 
automodule:: jupyter_core :members: :undoc-members: :show-inheritance: jupyter_core-5.7.2/docs/api/jupyter_core.utils.rst000066400000000000000000000002631457404620400223520ustar00rootroot00000000000000jupyter\_core.utils package =========================== Module contents --------------- .. automodule:: jupyter_core.utils :members: :undoc-members: :show-inheritance: jupyter_core-5.7.2/docs/api/modules.rst000066400000000000000000000001111457404620400201410ustar00rootroot00000000000000jupyter_core ============ .. toctree:: :maxdepth: 4 jupyter_core jupyter_core-5.7.2/docs/conf.py000066400000000000000000000232201457404620400164730ustar00rootroot00000000000000# jupyter_core documentation build configuration file, created by # sphinx-quickstart on Wed Jun 24 11:51:36 2015. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. from __future__ import annotations import shutil from pathlib import Path from jupyter_core.version import __version__, version_info # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ "myst_parser", "sphinx.ext.autodoc", "sphinx.ext.intersphinx", "sphinxcontrib_github_alt", "sphinx_autodoc_typehints", ] try: import enchant # noqa: F401 extensions += ["sphinxcontrib.spelling"] except ImportError: pass github_project_url = "https://github.com/jupyter/jupyter_core" # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] source_suffix = ".rst" # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = "index" # General information about the project. project = "jupyter_core" copyright = "2015, Jupyter Development Team" author = "Jupyter Development Team" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = "%d.%d" % version_info[:2] # type:ignore[str-format] # The full version, including alpha/beta/rc tags. release = __version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = "en" # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. 
exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all # documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. # keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = "pydata_sphinx_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. html_theme_options = {"navigation_with_keys": False} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". # html_static_path = ["_static"] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. # html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. 
The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr' # html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # Now only 'ja' uses this config value # html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. # html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. htmlhelp_basename = "jupyter_coredoc" # -- Options for LaTeX output --------------------------------------------- # latex_elements: dict = {} # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ( master_doc, "jupyter_core.tex", "jupyter\\_core Documentation", "Jupyter Development Team", "manual", ), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [(master_doc, "jupyter_core", "jupyter_core Documentation", [author], 1)] # If true, show URL addresses after external links. # man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ( master_doc, "jupyter_core", "jupyter_core Documentation", author, "jupyter_core", "One line description of project.", "Miscellaneous", ), ] # Documents to append as an appendix to all manuals. # texinfo_appendices = [] # If false, no module index is generated. # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. # texinfo_no_detailmenu = False # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {"https://docs.python.org/3/": None} def setup(_): here = Path(__file__).parent.resolve() shutil.copy(Path(here, "..", "CHANGELOG.md"), "changelog.md") jupyter_core-5.7.2/docs/index.rst000066400000000000000000000006471457404620400170450ustar00rootroot00000000000000jupyter_core |version| ====================== This documentation only describes the public API in the ``jupyter_core`` package. For overview information about using Jupyter, see the `main Jupyter docs `__. Contents: .. 
toctree:: :maxdepth: 2 changelog API Docs Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` jupyter_core-5.7.2/docs/make.bat000066400000000000000000000161301457404620400166030ustar00rootroot00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . set I18NSPHINXOPTS=%SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. xml to make Docutils-native XML files echo. pseudoxml to make pseudoxml-XML files for display purposes echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled echo. coverage to run coverage check of the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) REM Check if sphinx-build is available and fallback to Python version if any %SPHINXBUILD% 2> nul if errorlevel 9009 goto sphinx_python goto sphinx_ok :sphinx_python set SPHINXBUILD=python -m sphinx.__init__ %SPHINXBUILD% 2> nul if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) :sphinx_ok if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. 
echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\jupyter_core.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\jupyter_core.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdf" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf cd %~dp0 echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdfja" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf-ja cd %~dp0 echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) if "%1" == "coverage" ( %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage if errorlevel 1 exit /b 1 echo. echo.Testing of coverage in the sources finished, look at the ^ results in %BUILDDIR%/coverage/python.txt. goto end ) if "%1" == "xml" ( %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml if errorlevel 1 exit /b 1 echo. echo.Build finished. The XML files are in %BUILDDIR%/xml. goto end ) if "%1" == "pseudoxml" ( %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml if errorlevel 1 exit /b 1 echo. echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 
goto end ) :end jupyter_core-5.7.2/examples/000077500000000000000000000000001457404620400160635ustar00rootroot00000000000000jupyter_core-5.7.2/examples/completions-zsh000066400000000000000000000144031457404620400211460ustar00rootroot00000000000000#compdef jupyter # A zsh autocompleter for jupyter. _jupyter() { IFS=$'\n' local context curcontext="$curcontext" state line typeset -A opt_args local ret=1 _arguments -C \ '1: :_jupyter_cmds' \ '(-h,--help)'{-h,--help}'[Show this help message and exit.]' \ '--version[Show the jupyter command'\''s version and exit.]' \ '--config-dir[Show Jupyter config dir.]' \ '--data-dir[Show Jupyter data dir.]' \ '--runtime-dir[Show Jupyter runtime dir.]' \ '--paths[Show all Jupyter paths. Add --json for machine-readable format.]' \ '--json[Output paths as machine-readable json.]' \ '*::arg:->args' \ && ret=0 case $state in (args) curcontext="${curcontext%:*:*}:jupyter-cmd-$words[1]:" local update_policy zstyle -s ":completion:${curcontext}:" cache-policy update_policy [[ -z "$update_policy" ]] && \ zstyle ":completion:${curcontext}:" \ cache-policy _jupyter_options_caching_policy local cache_id=jupyter_options local subcmd=$line[1] if (_cache_invalid $cache_id || ! _retrieve_cache $cache_id || \ [[ ${(P)+subcmd} -eq 0 ]] || _cache_invalid $cache_id); then typeset -agU $subcmd set -A $subcmd $( (jupyter $subcmd --help-all | \ grep -o '^--[^-][^= ]\+=\?' | sed 's/\([^=]*\)\(=\?\)/(\1)\1\2:/') 2>/dev/null) _store_cache $cache_id $subcmd fi case $subcmd in (console) _arguments \ '1:Source file:_files -g "*.py"' \ ${(P)subcmd} && ret=0 ;; (kernelspec) sub2cmd=$line[2] case $sub2cmd in (install|list) if ([[ ${(P)+sub2cmd} -eq 0 ]]) && ! _retrieve_cache $cache_id; then typeset -agU $sub2cmd set -A $sub2cmd $(_jupyter_get_options $subcmd $sub2cmd) _store_cache $cache_id $sub2cmd fi _arguments "1: :_${subcmd}_cmds" ${(P)sub2cmd} && ret=0 ;; *) _arguments "1: :_${subcmd}_cmds" ${(P)subcmd} && ret=0 ;; esac ;; (nbconvert) _arguments \ '1:Source file:_files -g "*.ipynb"' \ ${(P)subcmd} && ret=0 ;; (nbextension) sub2cmd=$line[2] case $sub2cmd in (disable|enable) if ([[ ${(P)+sub2cmd} -eq 0 ]]) && ! _retrieve_cache $cache_id; then typeset -agU $sub2cmd set -A $sub2cmd $(_jupyter_get_options $subcmd $sub2cmd) _store_cache $cache_id $sub2cmd fi _arguments \ '1: :_nbextension_cmds' \ '2:Extension path:_files' \ ${(P)sub2cmd} && ret=0 ;; (install) if ([[ ${(P)+sub2cmd} -eq 0 ]]) && ! _retrieve_cache $cache_id; then typeset -agU $sub2cmd set -A $sub2cmd $(_jupyter_get_options $subcmd $sub2cmd) _store_cache $cache_id $sub2cmd fi _arguments \ '1: :_nbextension_cmds' \ '2:Extension path:_files' \ ${(P)sub2cmd} && ret=0 ;; *) _arguments "1: :_${subcmd}_cmds" ${(P)subcmd} && ret=0 ;; esac ;; (notebook) sub2cmd=$line[2] case $sub2cmd in (list) if ([[ ${(P)+sub2cmd} -eq 0 ]]) && ! _retrieve_cache $cache_id; then typeset -agU $sub2cmd set -A $sub2cmd $(_jupyter_get_options $subcmd $sub2cmd) _store_cache $cache_id $sub2cmd fi _arguments "1: :_${subcmd}_cmds" ${(P)sub2cmd} && ret=0 ;; *) _arguments "1: :_${subcmd}_cmds" ${(P)subcmd} && ret=0 ;; esac ;; (trust) _arguments \ '*:Source file:_files -g "*.ipynb"' \ ${(P)subcmd} && ret=0 ;; *) _arguments ${(P)subcmd} && ret=0 ;; esac ;; esac } _jupyter_options_caching_policy() { local -a newer # rebuild if cache does not exist or is more than a week old newer=( "$1"(Nmw-1) ) return $#newer } _jupyter_get_options() { echo '(--help)--help[Print help about subcommand.]:' (jupyter "$@" --help-all | \ grep -o '^--[^-][^= ]\+=\?' 
| sed 's/\([^=]*\)\(=\?\)/(\1)\1\2:/') 2>/dev/null } _jupyter_cmds() { local -a commands if whence jupyter-console >/dev/null; then commands=($commands 'console:Launch a Console application inside a terminal.') fi if whence jupyter-kernelspec >/dev/null; then commands=($commands 'kernelspec:Manage Jupyter kernel specifications.') fi if whence jupyter-nbconvert >/dev/null; then commands=($commands 'nbconvert:Convert notebook files to various other formats.') fi if whence jupyter-nbextension >/dev/null; then commands=($commands 'nbextension:Work with Jupyter notebook extensions.') fi if whence jupyter-notebook >/dev/null; then commands=($commands 'notebook:Launch a Tornado based HTML Notebook Server.') fi if whence jupyter-qtconsole >/dev/null; then commands=($commands 'qtconsole:Launch a Console-style application using Qt.') fi if whence jupyter-trust >/dev/null; then commands=($commands 'trust:Sign Jupyter notebooks with your key, to trust their dynamic output.') fi _describe -t commands 'jupyter command' commands "$@" } _kernelspec_cmds() { local commands; commands=( 'help:Print help about subcommand.' 'install:Install a kernel specification directory.' 'list:List installed kernel specifications.' ) _describe -t commands 'kernelspec command' commands "$@" } _nbextension_cmds() { local commands; commands=( 'help:Print help about subcommand.' 'enable:Enable a notebook extension.' 'install:Install notebook extensions.' 'disable:Disable a notebook extension.' ) _describe -t commands 'nbextension command' commands "$@" } _notebook_cmds() { local commands; commands=( 'help:Print help about subcommand.' 'list:List currently running notebook servers in this profile.' ) _describe -t commands 'notebook command' commands "$@" } _jupyter "$@" # vim: ft=zsh sw=2 ts=2 et jupyter_core-5.7.2/examples/jupyter-completion.bash000066400000000000000000000060111457404620400225710ustar00rootroot00000000000000# load with: . jupyter-completion.bash # # NOTE: with traitlets>=5.8, jupyter and its subcommands now directly support # shell command-line tab-completion using argcomplete, which has more complete # support than this script. 
Simply install argcomplete and activate global # completion by following the relevant instructions in: # https://kislyuk.github.io/argcomplete/#activating-global-completion if [[ -n ${ZSH_VERSION-} ]]; then autoload -Uz bashcompinit && bashcompinit fi _jupyter_get_flags() { local url=$1 local var=$2 local dash=$3 if [[ "$url $var" == $__jupyter_complete_last ]]; then opts=$__jupyter_complete_last_res return fi if [ -z $1 ]; then opts=$(jupyter --help | sed -n 's/^ -/-/p' |sed -e 's/, /\n/' |sed -e 's/\(-[[:alnum:]_-]*\).*/\1/') else # matplotlib and profile don't need the = and the # version without simplifies the special cased completion opts=$(jupyter ${url} --help-all | grep -E "^-{1,2}[^-]" | sed -e "s/<.*//" -e "s/[^=]$/& /" -e "$ s/^/\n-h\n--help\n--help-all\n/") fi __jupyter_complete_last="$url $var" __jupyter_complete_last_res="$opts" } _jupyter() { local cur=${COMP_WORDS[COMP_CWORD]} local prev=${COMP_WORDS[COMP_CWORD - 1]} local subcommands="notebook qtconsole console nbconvert kernelspec trust " local opts="help" local mode="" for i in "${COMP_WORDS[@]}"; do [ "$cur" = "$i" ] && break if [[ ${subcommands} == *${i}* ]]; then mode="$i" break elif [[ ${i} == "--"* ]]; then mode="nosubcommand" break fi done if [[ ${cur} == -* ]]; then case $mode in "notebook" | "qtconsole" | "console" | "nbconvert") _jupyter_get_flags $mode opts=$"${opts}" ;; "kernelspec") if [[ $COMP_CWORD -ge 3 ]]; then # 'history trim' and 'history clear' covered by next line _jupyter_get_flags $mode\ "${COMP_WORDS[2]}" else _jupyter_get_flags $mode fi opts=$"${opts}" ;; *) _jupyter_get_flags esac # don't drop the trailing space local IFS=$'\t\n' COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) return 0 elif [[ $mode == "kernelspec" ]]; then if [[ $COMP_CWORD -ge 3 ]]; then # drop into flags opts="--" else opts="list install " fi local IFS=$'\t\n' COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) else if [ "$COMP_CWORD" == 1 ]; then local IFS=$'\t\n' local sub=$(echo $subcommands | sed -e "s/ / \t/g") COMPREPLY=( $(compgen -W "${sub}" -- ${cur}) ) else local IFS=$'\n' COMPREPLY=( $(compgen -o filenames -f -- ${cur}) ) fi fi } complete -o default -o nospace -F _jupyter jupyter jupyter_core-5.7.2/jupyter.py000066400000000000000000000002331457404620400163170ustar00rootroot00000000000000"""Launch the root jupyter command""" from __future__ import annotations if __name__ == "__main__": from jupyter_core.command import main main() jupyter_core-5.7.2/jupyter_core/000077500000000000000000000000001457404620400167575ustar00rootroot00000000000000jupyter_core-5.7.2/jupyter_core/__init__.py000066400000000000000000000001411457404620400210640ustar00rootroot00000000000000from __future__ import annotations from .version import __version__, version_info # noqa: F401 jupyter_core-5.7.2/jupyter_core/__main__.py000066400000000000000000000001541457404620400210510ustar00rootroot00000000000000"""Launch the root jupyter command""" from __future__ import annotations from .command import main main() jupyter_core-5.7.2/jupyter_core/application.py000066400000000000000000000242061457404620400216400ustar00rootroot00000000000000""" A base Application class for Jupyter applications. All Jupyter applications should inherit from this. """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
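# Illustrative sketch only (not part of this module): downstream projects
# typically subclass JupyterApp, set ``name`` and ``description``, override
# ``start()``, and launch via ``launch_instance()``.  The application name
# used below is hypothetical.
#
#     from jupyter_core.application import JupyterApp
#
#     class MyApp(JupyterApp):
#         name = "jupyter-myapp"
#         description = "An example Jupyter application"
#
#         def start(self):
#             self.log.info("Using config dir %s", self.config_dir)
#
#     if __name__ == "__main__":
#         MyApp.launch_instance()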
from __future__ import annotations import logging import os import sys import typing as t from copy import deepcopy from pathlib import Path from shutil import which from traitlets import Bool, List, Unicode, observe from traitlets.config.application import Application, catch_config_error from traitlets.config.loader import ConfigFileNotFound from .paths import ( allow_insecure_writes, issue_insecure_write_warning, jupyter_config_dir, jupyter_config_path, jupyter_data_dir, jupyter_path, jupyter_runtime_dir, ) from .utils import ensure_dir_exists, ensure_event_loop # mypy: disable-error-code="no-untyped-call" # aliases and flags base_aliases: dict[str, t.Any] = {} if isinstance(Application.aliases, dict): # traitlets 5 base_aliases.update(Application.aliases) _jupyter_aliases = { "log-level": "Application.log_level", "config": "JupyterApp.config_file", } base_aliases.update(_jupyter_aliases) base_flags: dict[str, t.Any] = {} if isinstance(Application.flags, dict): # traitlets 5 base_flags.update(Application.flags) _jupyter_flags: dict[str, t.Any] = { "debug": ( {"Application": {"log_level": logging.DEBUG}}, "set log level to logging.DEBUG (maximize logging output)", ), "generate-config": ({"JupyterApp": {"generate_config": True}}, "generate default config file"), "y": ( {"JupyterApp": {"answer_yes": True}}, "Answer yes to any questions instead of prompting.", ), } base_flags.update(_jupyter_flags) class NoStart(Exception): """Exception to raise when an application shouldn't start""" class JupyterApp(Application): """Base class for Jupyter applications""" name = "jupyter" # override in subclasses description = "A Jupyter Application" aliases = base_aliases flags = base_flags def _log_level_default(self) -> int: return logging.INFO jupyter_path = List(Unicode()) def _jupyter_path_default(self) -> list[str]: return jupyter_path() config_dir = Unicode() def _config_dir_default(self) -> str: return jupyter_config_dir() @property def config_file_paths(self) -> list[str]: path = jupyter_config_path() if self.config_dir not in path: # Insert config dir as first item. path.insert(0, self.config_dir) return path data_dir = Unicode() def _data_dir_default(self) -> str: d = jupyter_data_dir() ensure_dir_exists(d, mode=0o700) return d runtime_dir = Unicode() def _runtime_dir_default(self) -> str: rd = jupyter_runtime_dir() ensure_dir_exists(rd, mode=0o700) return rd @observe("runtime_dir") def _runtime_dir_changed(self, change: t.Any) -> None: ensure_dir_exists(change["new"], mode=0o700) generate_config = Bool(False, config=True, help="""Generate default config file.""") config_file_name = Unicode(config=True, help="Specify a config file to load.") def _config_file_name_default(self) -> str: if not self.name: return "" return self.name.replace("-", "_") + "_config" config_file = Unicode( config=True, help="""Full path of a config file.""", ) answer_yes = Bool(False, config=True, help="""Answer yes to any prompts.""") def write_default_config(self) -> None: """Write our default config to a .py config file""" if self.config_file: config_file = self.config_file else: config_file = str(Path(self.config_dir, self.config_file_name + ".py")) if Path(config_file).exists() and not self.answer_yes: answer = "" def ask() -> str: prompt = "Overwrite %s with default config? 
[y/N]" % config_file try: return input(prompt).lower() or "n" except KeyboardInterrupt: print("") # empty line return "n" answer = ask() while not answer.startswith(("y", "n")): print("Please answer 'yes' or 'no'") answer = ask() if answer.startswith("n"): return config_text = self.generate_config_file() print("Writing default config to: %s" % config_file) ensure_dir_exists(Path(config_file).parent.resolve(), 0o700) with Path.open(Path(config_file), mode="w", encoding="utf-8") as f: f.write(config_text) def migrate_config(self) -> None: """Migrate config/data from IPython 3""" try: # let's see if we can open the marker file # for reading and updating (writing) f_marker = Path.open(Path(self.config_dir, "migrated"), "r+") except FileNotFoundError: # cannot find the marker file pass # that means we have not migrated yet, so continue except OSError: # not readable and/or writable return # so let's give up migration in such an environment else: # if we got here without raising anything, # that means the file exists f_marker.close() return # so we must have already migrated -> bail out from .migrate import get_ipython_dir, migrate # No IPython dir, nothing to migrate if not Path(get_ipython_dir()).exists(): return migrate() def load_config_file(self, suppress_errors: bool = True) -> None: # type:ignore[override] """Load the config file. By default, errors in loading config are handled, and a warning printed on screen. For testing, the suppress_errors option is set to False, so errors will make tests fail. """ self.log.debug("Searching %s for config files", self.config_file_paths) base_config = "jupyter_config" try: super().load_config_file( base_config, path=self.config_file_paths, ) except ConfigFileNotFound: # ignore errors loading parent self.log.debug("Config file %s not found", base_config) if self.config_file: path, config_file_name = os.path.split(self.config_file) else: path = self.config_file_paths # type:ignore[assignment] config_file_name = self.config_file_name if not config_file_name or (config_file_name == base_config): return try: super().load_config_file(config_file_name, path=path) except ConfigFileNotFound: self.log.debug("Config file not found, skipping: %s", config_file_name) except Exception: # Reraise errors for testing purposes, or if set in # self.raise_config_file_errors if (not suppress_errors) or self.raise_config_file_errors: raise self.log.warning("Error loading config file: %s", config_file_name, exc_info=True) # subcommand-related def _find_subcommand(self, name: str) -> str: name = f"{self.name}-{name}" return which(name) or "" @property def _dispatching(self) -> bool: """Return whether we are dispatching to another command or running ourselves. 
""" return bool(self.generate_config or self.subapp or self.subcommand) subcommand = Unicode() @catch_config_error def initialize(self, argv: t.Any = None) -> None: """Initialize the application.""" # don't hook up crash handler before parsing command-line if argv is None: argv = sys.argv[1:] if argv: subc = self._find_subcommand(argv[0]) if subc: self.argv = argv self.subcommand = subc return self.parse_command_line(argv) cl_config = deepcopy(self.config) if self._dispatching: return self.migrate_config() self.load_config_file() # enforce cl-opts override configfile opts: self.update_config(cl_config) if allow_insecure_writes: issue_insecure_write_warning() def start(self) -> None: """Start the whole thing""" if self.subcommand: os.execv(self.subcommand, [self.subcommand] + self.argv[1:]) # noqa: S606 raise NoStart() if self.subapp: self.subapp.start() raise NoStart() if self.generate_config: self.write_default_config() raise NoStart() @classmethod def launch_instance(cls, argv: t.Any = None, **kwargs: t.Any) -> None: """Launch an instance of a Jupyter Application""" # Ensure an event loop is set before any other code runs. loop = ensure_event_loop() try: super().launch_instance(argv=argv, **kwargs) except NoStart: return loop.close() class JupyterAsyncApp(JupyterApp): """A Jupyter application that runs on an asyncio loop.""" name = "jupyter_async" # override in subclasses description = "An Async Jupyter Application" # Set to True for tornado-based apps. _prefer_selector_loop = False async def initialize_async(self, argv: t.Any = None) -> None: """Initialize the application asynchronoously.""" async def start_async(self) -> None: """Run the application in an event loop.""" @classmethod async def _launch_instance(cls, argv: t.Any = None, **kwargs: t.Any) -> None: app = cls.instance(**kwargs) app.initialize(argv) await app.initialize_async(argv) await app.start_async() @classmethod def launch_instance(cls, argv: t.Any = None, **kwargs: t.Any) -> None: """Launch an instance of an async Jupyter Application""" loop = ensure_event_loop(cls._prefer_selector_loop) coro = cls._launch_instance(argv, **kwargs) loop.run_until_complete(coro) loop.close() if __name__ == "__main__": JupyterApp.launch_instance() jupyter_core-5.7.2/jupyter_core/command.py000066400000000000000000000365231457404620400207600ustar00rootroot00000000000000# PYTHON_ARGCOMPLETE_OK """The root `jupyter` command. This does nothing other than dispatch to subcommands or output path info. """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. from __future__ import annotations import argparse import errno import json import os import site import sys import sysconfig from pathlib import Path from shutil import which from subprocess import Popen from typing import Any from . import paths from .version import __version__ class JupyterParser(argparse.ArgumentParser): """A Jupyter argument parser.""" @property def epilog(self) -> str | None: """Add subcommands to epilog on request Avoids searching PATH for subcommands unless help output is requested. 
""" return "Available subcommands: %s" % " ".join(list_subcommands()) @epilog.setter def epilog(self, x: Any) -> None: """Ignore epilog set in Parser.__init__""" def argcomplete(self) -> None: """Trigger auto-completion, if enabled""" try: import argcomplete # type: ignore[import-not-found] argcomplete.autocomplete(self) except ImportError: pass def jupyter_parser() -> JupyterParser: """Create a jupyter parser object.""" parser = JupyterParser( description="Jupyter: Interactive Computing", ) group = parser.add_mutually_exclusive_group(required=False) # don't use argparse's version action because it prints to stderr on py2 group.add_argument( "--version", action="store_true", help="show the versions of core jupyter packages and exit" ) subcommand_action = group.add_argument( "subcommand", type=str, nargs="?", help="the subcommand to launch" ) # For argcomplete, supply all known subcommands subcommand_action.completer = lambda *args, **kwargs: list_subcommands() # type: ignore[attr-defined] # noqa: ARG005 group.add_argument("--config-dir", action="store_true", help="show Jupyter config dir") group.add_argument("--data-dir", action="store_true", help="show Jupyter data dir") group.add_argument("--runtime-dir", action="store_true", help="show Jupyter runtime dir") group.add_argument( "--paths", action="store_true", help="show all Jupyter paths. Add --json for machine-readable format.", ) parser.add_argument("--json", action="store_true", help="output paths as machine-readable json") parser.add_argument("--debug", action="store_true", help="output debug information about paths") return parser def list_subcommands() -> list[str]: """List all jupyter subcommands searches PATH for `jupyter-name` Returns a list of jupyter's subcommand names, without the `jupyter-` prefix. Nested children (e.g. jupyter-sub-subsub) are not included. """ subcommand_tuples = set() # construct a set of `('foo', 'bar') from `jupyter-foo-bar` for d in _path_with_self(): try: names = os.listdir(d) except OSError: continue for name in names: if name.startswith("jupyter-"): if sys.platform.startswith("win"): # remove file-extension on Windows name = os.path.splitext(name)[0] # noqa: PTH122, PLW2901 subcommand_tuples.add(tuple(name.split("-")[1:])) # build a set of subcommand strings, excluding subcommands whose parents are defined subcommands = set() # Only include `jupyter-foo-bar` if `jupyter-foo` is not already present for sub_tup in subcommand_tuples: if not any(sub_tup[:i] in subcommand_tuples for i in range(1, len(sub_tup))): subcommands.add("-".join(sub_tup)) return sorted(subcommands) def _execvp(cmd: str, argv: list[str]) -> None: """execvp, except on Windows where it uses Popen Python provides execvp on Windows, but its behavior is problematic (Python bug#9148). """ if sys.platform.startswith("win"): # PATH is ignored when shell=False, # so rely on shutil.which cmd_path = which(cmd) if cmd_path is None: raise OSError("%r not found" % cmd, errno.ENOENT) p = Popen([cmd_path] + argv[1:]) # noqa: S603 # Don't raise KeyboardInterrupt in the parent process. # Set this after spawning, to avoid subprocess inheriting handler. import signal signal.signal(signal.SIGINT, signal.SIG_IGN) p.wait() sys.exit(p.returncode) else: os.execvp(cmd, argv) # noqa: S606 def _jupyter_abspath(subcommand: str) -> str: """This method get the abspath of a specified jupyter-subcommand with no changes on ENV. 
""" # get env PATH with self search_path = os.pathsep.join(_path_with_self()) # get the abs path for the jupyter- jupyter_subcommand = f"jupyter-{subcommand}" abs_path = which(jupyter_subcommand, path=search_path) if abs_path is None: msg = f"\nJupyter command `{jupyter_subcommand}` not found." raise Exception(msg) if not os.access(abs_path, os.X_OK): msg = f"\nJupyter command `{jupyter_subcommand}` is not executable." raise Exception(msg) return abs_path def _path_with_self() -> list[str]: """Put `jupyter`'s dir at the front of PATH Ensures that /path/to/jupyter subcommand will do /path/to/jupyter-subcommand even if /other/jupyter-subcommand is ahead of it on PATH """ path_list = (os.environ.get("PATH") or os.defpath).split(os.pathsep) # Insert the "scripts" directory for this Python installation # This allows the "jupyter" command to be relocated, while still # finding subcommands that have been installed in the default # location. # We put the scripts directory at the *end* of PATH, so that # if the user explicitly overrides a subcommand, that override # still takes effect. try: bindir = sysconfig.get_path("scripts") except KeyError: # The Python environment does not specify a "scripts" location pass else: path_list.append(bindir) scripts = [sys.argv[0]] if Path(scripts[0]).is_symlink(): # include realpath, if `jupyter` is a symlink scripts.append(os.path.realpath(scripts[0])) for script in scripts: bindir = str(Path(script).parent) if Path(bindir).is_dir() and os.access(script, os.X_OK): # only if it's a script # ensure executable's dir is on PATH # avoids missing subcommands when jupyter is run via absolute path path_list.insert(0, bindir) return path_list def _evaluate_argcomplete(parser: JupyterParser) -> list[str]: """If argcomplete is enabled, trigger autocomplete or return current words If the first word looks like a subcommand, return the current command that is attempting to be completed so that the subcommand can evaluate it; otherwise auto-complete using the main parser. """ try: # traitlets >= 5.8 provides some argcomplete support, # use helper methods to jump to argcomplete from traitlets.config.argcomplete_config import ( get_argcomplete_cwords, increment_argcomplete_index, ) cwords = get_argcomplete_cwords() if cwords and len(cwords) > 1 and not cwords[1].startswith("-"): # If first completion word looks like a subcommand, # increment word from which to start handling arguments increment_argcomplete_index() return cwords # Otherwise no subcommand, directly autocomplete and exit parser.argcomplete() except ImportError: # traitlets >= 5.8 not available, just try to complete this without # worrying about subcommands parser.argcomplete() msg = "Control flow should not reach end of autocomplete()" raise AssertionError(msg) def main() -> None: """The command entry point.""" parser = jupyter_parser() argv = sys.argv subcommand = None if "_ARGCOMPLETE" in os.environ: argv = _evaluate_argcomplete(parser) subcommand = argv[1] elif len(argv) > 1 and not argv[1].startswith("-"): # Don't parse if a subcommand is given # Avoids argparse gobbling up args passed to subcommand, such as `-h`. 
subcommand = argv[1] else: args, opts = parser.parse_known_args() subcommand = args.subcommand if args.version: print("Selected Jupyter core packages...") for package in [ "IPython", "ipykernel", "ipywidgets", "jupyter_client", "jupyter_core", "jupyter_server", "jupyterlab", "nbclient", "nbconvert", "nbformat", "notebook", "qtconsole", "traitlets", ]: try: if package == "jupyter_core": # We're already here version = __version__ else: mod = __import__(package) version = mod.__version__ except ImportError: version = "not installed" print(f"{package:<17}:", version) return if args.json and not args.paths: sys.exit("--json is only used with --paths") if args.debug and not args.paths: sys.exit("--debug is only used with --paths") if args.debug and args.json: sys.exit("--debug cannot be used with --json") if args.config_dir: print(paths.jupyter_config_dir()) return if args.data_dir: print(paths.jupyter_data_dir()) return if args.runtime_dir: print(paths.jupyter_runtime_dir()) return if args.paths: data = {} data["runtime"] = [paths.jupyter_runtime_dir()] data["config"] = paths.jupyter_config_path() data["data"] = paths.jupyter_path() if args.json: print(json.dumps(data)) else: if args.debug: env = os.environ if paths.use_platform_dirs(): print( "JUPYTER_PLATFORM_DIRS is set to a true value, so we use platformdirs to find platform-specific directories" ) else: print( "JUPYTER_PLATFORM_DIRS is set to a false value, or is not set, so we use hardcoded legacy paths for platform-specific directories" ) if paths.prefer_environment_over_user(): print( "JUPYTER_PREFER_ENV_PATH is set to a true value, or JUPYTER_PREFER_ENV_PATH is not set and we detected a virtual environment, making the environment-level path preferred over the user-level path for data and config" ) else: print( "JUPYTER_PREFER_ENV_PATH is set to a false value, or JUPYTER_PREFER_ENV_PATH is not set and we did not detect a virtual environment, making the user-level path preferred over the environment-level path for data and config" ) # config path list if env.get("JUPYTER_NO_CONFIG"): print( "JUPYTER_NO_CONFIG is set, making the config path list only a single temporary directory" ) else: print( "JUPYTER_NO_CONFIG is not set, so we use the full path list for config" ) if env.get("JUPYTER_CONFIG_PATH"): print( f"JUPYTER_CONFIG_PATH is set to '{env.get('JUPYTER_CONFIG_PATH')}', which is prepended to the config path list (unless JUPYTER_NO_CONFIG is set)" ) else: print( "JUPYTER_CONFIG_PATH is not set, so we do not prepend anything to the config paths" ) if env.get("JUPYTER_CONFIG_DIR"): print( f"JUPYTER_CONFIG_DIR is set to '{env.get('JUPYTER_CONFIG_DIR')}', overriding the default user-level config directory" ) else: print( "JUPYTER_CONFIG_DIR is not set, so we use the default user-level config directory" ) if site.ENABLE_USER_SITE: print( f"Python's site.ENABLE_USER_SITE is True, so we add the user site directory '{site.getuserbase()}'" ) else: print( f"Python's site.ENABLE_USER_SITE is not True, so we do not add the Python site user directory '{site.getuserbase()}'" ) # data path list if env.get("JUPYTER_PATH"): print( f"JUPYTER_PATH is set to '{env.get('JUPYTER_PATH')}', which is prepended to the data paths" ) else: print( "JUPYTER_PATH is not set, so we do not prepend anything to the data paths" ) if env.get("JUPYTER_DATA_DIR"): print( f"JUPYTER_DATA_DIR is set to '{env.get('JUPYTER_DATA_DIR')}', overriding the default user-level data directory" ) else: print( "JUPYTER_DATA_DIR is not set, so we use the default user-level data directory" 
) # runtime directory if env.get("JUPYTER_RUNTIME_DIR"): print( f"JUPYTER_RUNTIME_DIR is set to '{env.get('JUPYTER_RUNTIME_DIR')}', overriding the default runtime directory" ) else: print( "JUPYTER_RUNTIME_DIR is not set, so we use the default runtime directory" ) print() for name in sorted(data): path = data[name] print("%s:" % name) for p in path: print(" " + p) return if not subcommand: parser.print_help(file=sys.stderr) sys.exit("\nPlease specify a subcommand or one of the optional arguments.") try: command = _jupyter_abspath(subcommand) except Exception as e: parser.print_help(file=sys.stderr) # special-case alias of "jupyter help" to "jupyter --help" if subcommand == "help": return sys.exit(str(e)) try: _execvp(command, [command] + argv[2:]) except OSError as e: sys.exit(f"Error executing Jupyter command {subcommand!r}: {e}") if __name__ == "__main__": main() jupyter_core-5.7.2/jupyter_core/migrate.py000066400000000000000000000207221457404620400207640ustar00rootroot00000000000000# PYTHON_ARGCOMPLETE_OK """Migrating IPython < 4.0 to Jupyter This *copies* configuration and resources to their new locations in Jupyter Migrations: - .ipython/ - nbextensions -> JUPYTER_DATA_DIR/nbextensions - kernels -> JUPYTER_DATA_DIR/kernels - .ipython/profile_default/ - static/custom -> .jupyter/custom - nbconfig -> .jupyter/nbconfig - security/ - notebook_secret, notebook_cookie_secret, nbsignatures.db -> JUPYTER_DATA_DIR - ipython_{notebook,nbconvert,qtconsole}_config.py -> .jupyter/jupyter_{name}_config.py """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. from __future__ import annotations import os import re import shutil from datetime import datetime, timezone from pathlib import Path from typing import Any from traitlets.config.loader import JSONFileConfigLoader, PyFileConfigLoader from traitlets.log import get_logger from .application import JupyterApp from .paths import jupyter_config_dir, jupyter_data_dir from .utils import ensure_dir_exists # mypy: disable-error-code="no-untyped-call" migrations = { str(Path("{ipython_dir}", "nbextensions")): str(Path("{jupyter_data}", "nbextensions")), str(Path("{ipython_dir}", "kernels")): str(Path("{jupyter_data}", "kernels")), str(Path("{profile}", "nbconfig")): str(Path("{jupyter_config}", "nbconfig")), } custom_src_t = str(Path("{profile}", "static", "custom")) custom_dst_t = str(Path("{jupyter_config}", "custom")) for security_file in ("notebook_secret", "notebook_cookie_secret", "nbsignatures.db"): src = str(Path("{profile}", "security", security_file)) dst = str(Path("{jupyter_data}", security_file)) migrations[src] = dst config_migrations = ["notebook", "nbconvert", "qtconsole"] regex = re.compile config_substitutions = { regex(r"\bIPythonQtConsoleApp\b"): "JupyterQtConsoleApp", regex(r"\bIPythonWidget\b"): "JupyterWidget", regex(r"\bRichIPythonWidget\b"): "RichJupyterWidget", regex(r"\bIPython\.html\b"): "notebook", regex(r"\bIPython\.nbconvert\b"): "nbconvert", } def get_ipython_dir() -> str: """Return the IPython directory location. Not imported from IPython because the IPython implementation ensures that a writable directory exists, creating a temporary directory if not. We don't want to trigger that when checking if migration should happen. We only need to support the IPython < 4 behavior for migration, so importing for forward-compatibility and edge cases is not important. 
""" return os.environ.get("IPYTHONDIR", str(Path("~/.ipython").expanduser())) def migrate_dir(src: str, dst: str) -> bool: """Migrate a directory from src to dst""" log = get_logger() if not os.listdir(src): log.debug("No files in %s", src) return False if Path(dst).exists(): if os.listdir(dst): # already exists, non-empty log.debug("%s already exists", dst) return False Path(dst).rmdir() log.info("Copying %s -> %s", src, dst) ensure_dir_exists(Path(dst).parent) shutil.copytree(src, dst, symlinks=True) return True def migrate_file(src: str | Path, dst: str | Path, substitutions: Any = None) -> bool: """Migrate a single file from src to dst substitutions is an optional dict of {regex: replacement} for performing replacements on the file. """ log = get_logger() if Path(dst).exists(): # already exists log.debug("%s already exists", dst) return False log.info("Copying %s -> %s", src, dst) ensure_dir_exists(Path(dst).parent) shutil.copy(src, dst) if substitutions: with Path.open(Path(dst), encoding="utf-8") as f: text = f.read() for pat, replacement in substitutions.items(): text = pat.sub(replacement, text) with Path.open(Path(dst), "w", encoding="utf-8") as f: f.write(text) return True def migrate_one(src: str, dst: str) -> bool: """Migrate one item dispatches to migrate_dir/_file """ log = get_logger() if Path(src).is_file(): return migrate_file(src, dst) if Path(src).is_dir(): return migrate_dir(src, dst) log.debug("Nothing to migrate for %s", src) return False def migrate_static_custom(src: str, dst: str) -> bool: """Migrate non-empty custom.js,css from src to dst src, dst are 'custom' directories containing custom.{js,css} """ log = get_logger() migrated = False custom_js = Path(src, "custom.js") custom_css = Path(src, "custom.css") # check if custom_js is empty: custom_js_empty = True if Path(custom_js).is_file(): with Path.open(custom_js, encoding="utf-8") as f: js = f.read().strip() for line in js.splitlines(): if not (line.isspace() or line.strip().startswith(("/*", "*", "//"))): custom_js_empty = False break # check if custom_css is empty: custom_css_empty = True if Path(custom_css).is_file(): with Path.open(custom_css, encoding="utf-8") as f: css = f.read().strip() custom_css_empty = css.startswith("/*") and css.endswith("*/") if custom_js_empty: log.debug("Ignoring empty %s", custom_js) if custom_css_empty: log.debug("Ignoring empty %s", custom_css) if custom_js_empty and custom_css_empty: # nothing to migrate return False ensure_dir_exists(dst) if not custom_js_empty or not custom_css_empty: ensure_dir_exists(dst) if not custom_js_empty and migrate_file(custom_js, Path(dst, "custom.js")): migrated = True if not custom_css_empty and migrate_file(custom_css, Path(dst, "custom.css")): migrated = True return migrated def migrate_config(name: str, env: Any) -> list[Any]: """Migrate a config file. Includes substitutions for updated configurable names. 
""" log = get_logger() src_base = str(Path(f"{env['profile']}", f"ipython_{name}_config")) dst_base = str(Path(f"{env['jupyter_config']}", f"jupyter_{name}_config")) loaders = { ".py": PyFileConfigLoader, ".json": JSONFileConfigLoader, } migrated = [] for ext in (".py", ".json"): src = src_base + ext dst = dst_base + ext if Path(src).exists(): cfg = loaders[ext](src).load_config() if cfg: if migrate_file(src, dst, substitutions=config_substitutions): migrated.append(src) else: # don't migrate empty config files log.debug("Not migrating empty config file: %s", src) return migrated def migrate() -> bool: """Migrate IPython configuration to Jupyter""" env = { "jupyter_data": jupyter_data_dir(), "jupyter_config": jupyter_config_dir(), "ipython_dir": get_ipython_dir(), "profile": str(Path(get_ipython_dir(), "profile_default")), } migrated = False for src_t, dst_t in migrations.items(): src = src_t.format(**env) dst = dst_t.format(**env) if Path(src).exists() and migrate_one(src, dst): migrated = True for name in config_migrations: if migrate_config(name, env): migrated = True custom_src = custom_src_t.format(**env) custom_dst = custom_dst_t.format(**env) if Path(custom_src).exists() and migrate_static_custom(custom_src, custom_dst): migrated = True # write a marker to avoid re-running migration checks ensure_dir_exists(env["jupyter_config"]) with Path.open(Path(env["jupyter_config"], "migrated"), "w", encoding="utf-8") as f: f.write(datetime.now(tz=timezone.utc).isoformat()) return migrated class JupyterMigrate(JupyterApp): """A Jupyter Migration App.""" name = "jupyter-migrate" description = """ Migrate configuration and data from .ipython prior to 4.0 to Jupyter locations. This migrates: - config files in the default profile - kernels in ~/.ipython/kernels - notebook javascript extensions in ~/.ipython/extensions - custom.js/css to .jupyter/custom to their new Jupyter locations. All files are copied, not moved. If the destinations already exist, nothing will be done. """ def start(self) -> None: """Start the application.""" if not migrate(): self.log.info("Found nothing to migrate.") main = JupyterMigrate.launch_instance if __name__ == "__main__": main() jupyter_core-5.7.2/jupyter_core/paths.py000066400000000000000000001060201457404620400204470ustar00rootroot00000000000000"""Path utility functions.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. # Derived from IPython.utils.path, which is # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. from __future__ import annotations import errno import os import site import stat import sys import tempfile import warnings from contextlib import contextmanager from pathlib import Path from typing import Any, Iterator, Optional import platformdirs from .utils import deprecation pjoin = os.path.join # Capitalize Jupyter in paths only on Windows and MacOS (when not in Homebrew) if sys.platform == "win32" or ( sys.platform == "darwin" and not sys.prefix.startswith("/opt/homebrew") ): APPNAME = "Jupyter" else: APPNAME = "jupyter" # UF_HIDDEN is a stat flag not defined in the stat module. # It is used by BSD to indicate hidden files. UF_HIDDEN = getattr(stat, "UF_HIDDEN", 32768) def envset(name: str, default: Optional[bool] = False) -> Optional[bool]: """Return the boolean value of a given environment variable. 
An environment variable is considered set if it is assigned to a value other than 'no', 'n', 'false', 'off', '0', or '0.0' (case insensitive) If the environment variable is not defined, the default value is returned. """ if name not in os.environ: return default return os.environ[name].lower() not in ["no", "n", "false", "off", "0", "0.0"] def use_platform_dirs() -> bool: """Determine if platformdirs should be used for system-specific paths. We plan for this to default to False in jupyter_core version 5 and to True in jupyter_core version 6. """ return envset("JUPYTER_PLATFORM_DIRS", False) # type:ignore[return-value] def get_home_dir() -> str: """Get the real path of the home directory""" homedir = Path("~").expanduser() # Next line will make things work even when /home/ is a symlink to # /usr/home as it is on FreeBSD, for example return str(Path(homedir).resolve()) _dtemps: dict[str, str] = {} def _do_i_own(path: str) -> bool: """Return whether the current user owns the given path""" p = Path(path).resolve() # walk up to first existing parent while not p.exists() and p != p.parent: p = p.parent # simplest check: owner by name # not always implemented or available try: return p.owner() == os.getlogin() except Exception: # noqa: S110 pass if hasattr(os, "geteuid"): try: st = p.stat() return st.st_uid == os.geteuid() except (NotImplementedError, OSError): # geteuid not always implemented pass # no ownership checks worked, check write access return os.access(p, os.W_OK) def prefer_environment_over_user() -> bool: """Determine if environment-level paths should take precedence over user-level paths.""" # If JUPYTER_PREFER_ENV_PATH is defined, that signals user intent, so return its value if "JUPYTER_PREFER_ENV_PATH" in os.environ: return envset("JUPYTER_PREFER_ENV_PATH") # type:ignore[return-value] # If we are in a Python virtualenv, default to True (see https://docs.python.org/3/library/venv.html#venv-def) if sys.prefix != sys.base_prefix and _do_i_own(sys.prefix): return True # If sys.prefix indicates Python comes from a conda/mamba environment that is not the root environment, default to True if ( "CONDA_PREFIX" in os.environ and sys.prefix.startswith(os.environ["CONDA_PREFIX"]) and os.environ.get("CONDA_DEFAULT_ENV", "base") != "base" and _do_i_own(sys.prefix) ): return True return False def _mkdtemp_once(name: str) -> str: """Make or reuse a temporary directory. If this is called with the same name in the same process, it will return the same directory. """ try: return _dtemps[name] except KeyError: d = _dtemps[name] = tempfile.mkdtemp(prefix=name + "-") return d def jupyter_config_dir() -> str: """Get the Jupyter config directory for this platform and user. Returns JUPYTER_CONFIG_DIR if defined, otherwise the appropriate directory for the platform. """ env = os.environ if env.get("JUPYTER_NO_CONFIG"): return _mkdtemp_once("jupyter-clean-cfg") if env.get("JUPYTER_CONFIG_DIR"): return env["JUPYTER_CONFIG_DIR"] if use_platform_dirs(): return platformdirs.user_config_dir(APPNAME, appauthor=False) home_dir = get_home_dir() return pjoin(home_dir, ".jupyter") def jupyter_data_dir() -> str: """Get the config directory for Jupyter data files for this platform and user. These are non-transient, non-configuration files. Returns JUPYTER_DATA_DIR if defined, else a platform-appropriate path. 
""" env = os.environ if env.get("JUPYTER_DATA_DIR"): return env["JUPYTER_DATA_DIR"] if use_platform_dirs(): return platformdirs.user_data_dir(APPNAME, appauthor=False) home = get_home_dir() if sys.platform == "darwin": return str(Path(home, "Library", "Jupyter")) if sys.platform == "win32": appdata = os.environ.get("APPDATA", None) if appdata: return str(Path(appdata, "jupyter").resolve()) return pjoin(jupyter_config_dir(), "data") # Linux, non-OS X Unix, AIX, etc. xdg = env.get("XDG_DATA_HOME", None) if not xdg: xdg = pjoin(home, ".local", "share") return pjoin(xdg, "jupyter") def jupyter_runtime_dir() -> str: """Return the runtime dir for transient jupyter files. Returns JUPYTER_RUNTIME_DIR if defined. The default is now (data_dir)/runtime on all platforms; we no longer use XDG_RUNTIME_DIR after various problems. """ env = os.environ if env.get("JUPYTER_RUNTIME_DIR"): return env["JUPYTER_RUNTIME_DIR"] return pjoin(jupyter_data_dir(), "runtime") if use_platform_dirs(): SYSTEM_JUPYTER_PATH = platformdirs.site_data_dir( APPNAME, appauthor=False, multipath=True ).split(os.pathsep) else: deprecation( "Jupyter is migrating its paths to use standard platformdirs\n" "given by the platformdirs library. To remove this warning and\n" "see the appropriate new directories, set the environment variable\n" "`JUPYTER_PLATFORM_DIRS=1` and then run `jupyter --paths`.\n" "The use of platformdirs will be the default in `jupyter_core` v6" ) if os.name == "nt": programdata = os.environ.get("PROGRAMDATA", None) if programdata: SYSTEM_JUPYTER_PATH = [pjoin(programdata, "jupyter")] else: # PROGRAMDATA is not defined by default on XP. SYSTEM_JUPYTER_PATH = [str(Path(sys.prefix, "share", "jupyter"))] else: SYSTEM_JUPYTER_PATH = [ "/usr/local/share/jupyter", "/usr/share/jupyter", ] ENV_JUPYTER_PATH: list[str] = [str(Path(sys.prefix, "share", "jupyter"))] def jupyter_path(*subdirs: str) -> list[str]: """Return a list of directories to search for data files JUPYTER_PATH environment variable has highest priority. If the JUPYTER_PREFER_ENV_PATH environment variable is set, the environment-level directories will have priority over user-level directories. If the Python site.ENABLE_USER_SITE variable is True, we also add the appropriate Python user site subdirectory to the user-level directories. If ``*subdirs`` are given, that subdirectory will be added to each element. Examples: >>> jupyter_path() ['~/.local/jupyter', '/usr/local/share/jupyter'] >>> jupyter_path('kernels') ['~/.local/jupyter/kernels', '/usr/local/share/jupyter/kernels'] """ paths: list[str] = [] # highest priority is explicit environment variable if os.environ.get("JUPYTER_PATH"): paths.extend(p.rstrip(os.sep) for p in os.environ["JUPYTER_PATH"].split(os.pathsep)) # Next is environment or user, depending on the JUPYTER_PREFER_ENV_PATH flag user = [jupyter_data_dir()] if site.ENABLE_USER_SITE: # Check if site.getuserbase() exists to be compatible with virtualenv, # which often does not have this method. 
userbase: Optional[str] userbase = site.getuserbase() if hasattr(site, "getuserbase") else site.USER_BASE if userbase: userdir = str(Path(userbase, "share", "jupyter")) if userdir not in user: user.append(userdir) env = [p for p in ENV_JUPYTER_PATH if p not in SYSTEM_JUPYTER_PATH] if prefer_environment_over_user(): paths.extend(env) paths.extend(user) else: paths.extend(user) paths.extend(env) # finally, system paths.extend(SYSTEM_JUPYTER_PATH) # add subdir, if requested if subdirs: paths = [pjoin(p, *subdirs) for p in paths] return paths if use_platform_dirs(): SYSTEM_CONFIG_PATH = platformdirs.site_config_dir( APPNAME, appauthor=False, multipath=True ).split(os.pathsep) else: if os.name == "nt": programdata = os.environ.get("PROGRAMDATA", None) if programdata: # noqa: SIM108 SYSTEM_CONFIG_PATH = [str(Path(programdata, "jupyter"))] else: # PROGRAMDATA is not defined by default on XP. SYSTEM_CONFIG_PATH = [] else: SYSTEM_CONFIG_PATH = [ "/usr/local/etc/jupyter", "/etc/jupyter", ] ENV_CONFIG_PATH: list[str] = [str(Path(sys.prefix, "etc", "jupyter"))] def jupyter_config_path() -> list[str]: """Return the search path for Jupyter config files as a list. If the JUPYTER_PREFER_ENV_PATH environment variable is set, the environment-level directories will have priority over user-level directories. If the Python site.ENABLE_USER_SITE variable is True, we also add the appropriate Python user site subdirectory to the user-level directories. """ if os.environ.get("JUPYTER_NO_CONFIG"): # jupyter_config_dir makes a blank config when JUPYTER_NO_CONFIG is set. return [jupyter_config_dir()] paths: list[str] = [] # highest priority is explicit environment variable if os.environ.get("JUPYTER_CONFIG_PATH"): paths.extend(p.rstrip(os.sep) for p in os.environ["JUPYTER_CONFIG_PATH"].split(os.pathsep)) # Next is environment or user, depending on the JUPYTER_PREFER_ENV_PATH flag user = [jupyter_config_dir()] if site.ENABLE_USER_SITE: userbase: Optional[str] # Check if site.getuserbase() exists to be compatible with virtualenv, # which often does not have this method. userbase = site.getuserbase() if hasattr(site, "getuserbase") else site.USER_BASE if userbase: userdir = str(Path(userbase, "etc", "jupyter")) if userdir not in user: user.append(userdir) env = [p for p in ENV_CONFIG_PATH if p not in SYSTEM_CONFIG_PATH] if prefer_environment_over_user(): paths.extend(env) paths.extend(user) else: paths.extend(user) paths.extend(env) # Finally, system path paths.extend(SYSTEM_CONFIG_PATH) return paths def exists(path: str) -> bool: """Replacement for `os.path.exists` which works for host mapped volumes on Windows containers """ try: os.lstat(path) except OSError: return False return True def is_file_hidden_win(abs_path: str, stat_res: Optional[Any] = None) -> bool: """Is a file hidden? This only checks the file itself; it should be called in combination with checking the directory containing the file. Use is_hidden() instead to check the file and its parent directories. Parameters ---------- abs_path : unicode The absolute path to check. stat_res : os.stat_result, optional The result of calling stat() on abs_path. If not passed, this function will call stat() internally. 
""" if Path(abs_path).name.startswith("."): return True if stat_res is None: try: stat_res = Path(abs_path).stat() except OSError as e: if e.errno == errno.ENOENT: return False raise try: if ( stat_res.st_file_attributes # type:ignore[union-attr] & stat.FILE_ATTRIBUTE_HIDDEN # type:ignore[attr-defined] ): return True except AttributeError: # allow AttributeError on PyPy for Windows # 'stat_result' object has no attribute 'st_file_attributes' # https://foss.heptapod.net/pypy/pypy/-/issues/3469 warnings.warn( "hidden files are not detectable on this system, so no file will be marked as hidden.", stacklevel=2, ) return False def is_file_hidden_posix(abs_path: str, stat_res: Optional[Any] = None) -> bool: """Is a file hidden? This only checks the file itself; it should be called in combination with checking the directory containing the file. Use is_hidden() instead to check the file and its parent directories. Parameters ---------- abs_path : unicode The absolute path to check. stat_res : os.stat_result, optional The result of calling stat() on abs_path. If not passed, this function will call stat() internally. """ if Path(abs_path).name.startswith("."): return True if stat_res is None or stat.S_ISLNK(stat_res.st_mode): try: stat_res = Path(abs_path).stat() except OSError as e: if e.errno == errno.ENOENT: return False raise # check that dirs can be listed if stat.S_ISDIR(stat_res.st_mode): # noqa: SIM102 # use x-access, not actual listing, in case of slow/large listings if not os.access(abs_path, os.X_OK | os.R_OK): return True # check UF_HIDDEN if getattr(stat_res, "st_flags", 0) & UF_HIDDEN: return True return False if sys.platform == "win32": is_file_hidden = is_file_hidden_win else: is_file_hidden = is_file_hidden_posix def is_hidden(abs_path: str, abs_root: str = "") -> bool: """Is a file hidden or contained in a hidden directory? This will start with the rightmost path element and work backwards to the given root to see if a path is hidden or in a hidden directory. Hidden is determined by either name starting with '.' or the UF_HIDDEN flag as reported by stat. If abs_path is the same directory as abs_root, it will be visible even if that is a hidden folder. This only checks the visibility of files and directories *within* abs_root. Parameters ---------- abs_path : unicode The absolute path to check for hidden directories. abs_root : unicode The absolute path of the root directory in which hidden directories should be checked for. """ abs_path = os.path.normpath(abs_path) abs_root = os.path.normpath(abs_root) if abs_path == abs_root: return False if is_file_hidden(abs_path): return True if not abs_root: abs_root = abs_path.split(os.sep, 1)[0] + os.sep inside_root = abs_path[len(abs_root) :] if any(part.startswith(".") for part in Path(inside_root).parts): return True # check UF_HIDDEN on any location up to root. # is_file_hidden() already checked the file, so start from its parent dir path = str(Path(abs_path).parent) while path and path.startswith(abs_root) and path != abs_root: if not Path(path).exists(): path = str(Path(path).parent) continue try: # may fail on Windows junctions st = os.lstat(path) except OSError: return True if getattr(st, "st_flags", 0) & UF_HIDDEN: return True path = str(Path(path).parent) return False def win32_restrict_file_to_user(fname: str) -> None: """Secure a windows file to read-only access for the user. 
Follows guidance from win32 library creator: http://timgolden.me.uk/python/win32_how_do_i/add-security-to-a-file.html This method should be executed against an already generated file which has no secrets written to it yet. Parameters ---------- fname : unicode The path to the file to secure """ try: import win32api except ImportError: return _win32_restrict_file_to_user_ctypes(fname) import ntsecuritycon as con import win32security # everyone, _domain, _type = win32security.LookupAccountName("", "Everyone") admins = win32security.CreateWellKnownSid(win32security.WinBuiltinAdministratorsSid) user, _domain, _type = win32security.LookupAccountName( "", win32api.GetUserNameEx(win32api.NameSamCompatible) ) sd = win32security.GetFileSecurity(fname, win32security.DACL_SECURITY_INFORMATION) dacl = win32security.ACL() # dacl.AddAccessAllowedAce(win32security.ACL_REVISION, con.FILE_ALL_ACCESS, everyone) dacl.AddAccessAllowedAce( win32security.ACL_REVISION, con.FILE_GENERIC_READ | con.FILE_GENERIC_WRITE | con.DELETE, user, ) dacl.AddAccessAllowedAce(win32security.ACL_REVISION, con.FILE_ALL_ACCESS, admins) sd.SetSecurityDescriptorDacl(1, dacl, 0) win32security.SetFileSecurity(fname, win32security.DACL_SECURITY_INFORMATION, sd) return None def _win32_restrict_file_to_user_ctypes(fname: str) -> None: """Secure a windows file to read-only access for the user. Follows guidance from win32 library creator: http://timgolden.me.uk/python/win32_how_do_i/add-security-to-a-file.html This method should be executed against an already generated file which has no secrets written to it yet. Parameters ---------- fname : unicode The path to the file to secure """ import ctypes from ctypes import wintypes advapi32 = ctypes.WinDLL("advapi32", use_last_error=True) # type:ignore[attr-defined] secur32 = ctypes.WinDLL("secur32", use_last_error=True) # type:ignore[attr-defined] NameSamCompatible = 2 WinBuiltinAdministratorsSid = 26 DACL_SECURITY_INFORMATION = 4 ACL_REVISION = 2 ERROR_INSUFFICIENT_BUFFER = 122 ERROR_MORE_DATA = 234 SYNCHRONIZE = 0x100000 DELETE = 0x00010000 STANDARD_RIGHTS_REQUIRED = 0xF0000 STANDARD_RIGHTS_READ = 0x20000 STANDARD_RIGHTS_WRITE = 0x20000 FILE_READ_DATA = 1 FILE_READ_EA = 8 FILE_READ_ATTRIBUTES = 128 FILE_WRITE_DATA = 2 FILE_APPEND_DATA = 4 FILE_WRITE_EA = 16 FILE_WRITE_ATTRIBUTES = 256 FILE_ALL_ACCESS = STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 0x1FF FILE_GENERIC_READ = ( STANDARD_RIGHTS_READ | FILE_READ_DATA | FILE_READ_ATTRIBUTES | FILE_READ_EA | SYNCHRONIZE ) FILE_GENERIC_WRITE = ( STANDARD_RIGHTS_WRITE | FILE_WRITE_DATA | FILE_WRITE_ATTRIBUTES | FILE_WRITE_EA | FILE_APPEND_DATA | SYNCHRONIZE ) class ACL(ctypes.Structure): _fields_ = [ ("AclRevision", wintypes.BYTE), ("Sbz1", wintypes.BYTE), ("AclSize", wintypes.WORD), ("AceCount", wintypes.WORD), ("Sbz2", wintypes.WORD), ] PSID = ctypes.c_void_p PACL = ctypes.POINTER(ACL) PSECURITY_DESCRIPTOR = ctypes.POINTER(wintypes.BYTE) def _nonzero_success(result: int, func: Any, args: Any) -> Any: # noqa: ARG001 if not result: raise ctypes.WinError(ctypes.get_last_error()) # type:ignore[attr-defined] return args secur32.GetUserNameExW.errcheck = _nonzero_success secur32.GetUserNameExW.restype = wintypes.BOOL secur32.GetUserNameExW.argtypes = ( ctypes.c_int, # EXTENDED_NAME_FORMAT NameFormat wintypes.LPWSTR, # LPWSTR lpNameBuffer, wintypes.PULONG, # PULONG nSize ) advapi32.CreateWellKnownSid.errcheck = _nonzero_success advapi32.CreateWellKnownSid.restype = wintypes.BOOL advapi32.CreateWellKnownSid.argtypes = ( wintypes.DWORD, # WELL_KNOWN_SID_TYPE 
WellKnownSidType PSID, # PSID DomainSid PSID, # PSID pSid wintypes.PDWORD, # DWORD *cbSid ) advapi32.LookupAccountNameW.errcheck = _nonzero_success advapi32.LookupAccountNameW.restype = wintypes.BOOL advapi32.LookupAccountNameW.argtypes = ( wintypes.LPWSTR, # LPCWSTR lpSystemName wintypes.LPWSTR, # LPCWSTR lpAccountName PSID, # PSID Sid wintypes.LPDWORD, # LPDWORD cbSid wintypes.LPWSTR, # LPCWSTR ReferencedDomainName wintypes.LPDWORD, # LPDWORD cchReferencedDomainName wintypes.LPDWORD, # PSID_NAME_USE peUse ) advapi32.AddAccessAllowedAce.errcheck = _nonzero_success advapi32.AddAccessAllowedAce.restype = wintypes.BOOL advapi32.AddAccessAllowedAce.argtypes = ( PACL, # PACL pAcl wintypes.DWORD, # DWORD dwAceRevision wintypes.DWORD, # DWORD AccessMask PSID, # PSID pSid ) advapi32.SetSecurityDescriptorDacl.errcheck = _nonzero_success advapi32.SetSecurityDescriptorDacl.restype = wintypes.BOOL advapi32.SetSecurityDescriptorDacl.argtypes = ( PSECURITY_DESCRIPTOR, # PSECURITY_DESCRIPTOR pSecurityDescriptor wintypes.BOOL, # BOOL bDaclPresent PACL, # PACL pDacl wintypes.BOOL, # BOOL bDaclDefaulted ) advapi32.GetFileSecurityW.errcheck = _nonzero_success advapi32.GetFileSecurityW.restype = wintypes.BOOL advapi32.GetFileSecurityW.argtypes = ( wintypes.LPCWSTR, # LPCWSTR lpFileName wintypes.DWORD, # SECURITY_INFORMATION RequestedInformation PSECURITY_DESCRIPTOR, # PSECURITY_DESCRIPTOR pSecurityDescriptor wintypes.DWORD, # DWORD nLength wintypes.LPDWORD, # LPDWORD lpnLengthNeeded ) advapi32.SetFileSecurityW.errcheck = _nonzero_success advapi32.SetFileSecurityW.restype = wintypes.BOOL advapi32.SetFileSecurityW.argtypes = ( wintypes.LPCWSTR, # LPCWSTR lpFileName wintypes.DWORD, # SECURITY_INFORMATION SecurityInformation PSECURITY_DESCRIPTOR, # PSECURITY_DESCRIPTOR pSecurityDescriptor ) advapi32.MakeAbsoluteSD.errcheck = _nonzero_success advapi32.MakeAbsoluteSD.restype = wintypes.BOOL advapi32.MakeAbsoluteSD.argtypes = ( PSECURITY_DESCRIPTOR, # pSelfRelativeSecurityDescriptor PSECURITY_DESCRIPTOR, # pAbsoluteSecurityDescriptor wintypes.LPDWORD, # LPDWORD lpdwAbsoluteSecurityDescriptorSize PACL, # PACL pDacl wintypes.LPDWORD, # LPDWORD lpdwDaclSize PACL, # PACL pSacl wintypes.LPDWORD, # LPDWORD lpdwSaclSize PSID, # PSID pOwner wintypes.LPDWORD, # LPDWORD lpdwOwnerSize PSID, # PSID pPrimaryGroup wintypes.LPDWORD, # LPDWORD lpdwPrimaryGroupSize ) advapi32.MakeSelfRelativeSD.errcheck = _nonzero_success advapi32.MakeSelfRelativeSD.restype = wintypes.BOOL advapi32.MakeSelfRelativeSD.argtypes = ( PSECURITY_DESCRIPTOR, # pAbsoluteSecurityDescriptor PSECURITY_DESCRIPTOR, # pSelfRelativeSecurityDescriptor wintypes.LPDWORD, # LPDWORD lpdwBufferLength ) advapi32.InitializeAcl.errcheck = _nonzero_success advapi32.InitializeAcl.restype = wintypes.BOOL advapi32.InitializeAcl.argtypes = ( PACL, # PACL pAcl, wintypes.DWORD, # DWORD nAclLength, wintypes.DWORD, # DWORD dwAclRevision ) def CreateWellKnownSid(WellKnownSidType: Any) -> Any: # return a SID for predefined aliases pSid = (ctypes.c_char * 1)() cbSid = wintypes.DWORD() try: advapi32.CreateWellKnownSid(WellKnownSidType, None, pSid, ctypes.byref(cbSid)) except OSError as e: if e.winerror != ERROR_INSUFFICIENT_BUFFER: # type:ignore[attr-defined] raise pSid = (ctypes.c_char * cbSid.value)() advapi32.CreateWellKnownSid(WellKnownSidType, None, pSid, ctypes.byref(cbSid)) return pSid[:] def GetUserNameEx(NameFormat: Any) -> Any: # return the user or other security principal associated with # the calling thread nSize = ctypes.pointer(ctypes.c_ulong(0)) try: 
secur32.GetUserNameExW(NameFormat, None, nSize) except OSError as e: if e.winerror != ERROR_MORE_DATA: # type:ignore[attr-defined] raise if not nSize.contents.value: return None lpNameBuffer = ctypes.create_unicode_buffer(nSize.contents.value) secur32.GetUserNameExW(NameFormat, lpNameBuffer, nSize) return lpNameBuffer.value def LookupAccountName(lpSystemName: Any, lpAccountName: Any) -> Any: # return a security identifier (SID) for an account on a system # and the name of the domain on which the account was found cbSid = wintypes.DWORD(0) cchReferencedDomainName = wintypes.DWORD(0) peUse = wintypes.DWORD(0) try: advapi32.LookupAccountNameW( lpSystemName, lpAccountName, None, ctypes.byref(cbSid), None, ctypes.byref(cchReferencedDomainName), ctypes.byref(peUse), ) except OSError as e: if e.winerror != ERROR_INSUFFICIENT_BUFFER: # type:ignore[attr-defined] raise Sid = ctypes.create_unicode_buffer("", cbSid.value) pSid = ctypes.cast(ctypes.pointer(Sid), wintypes.LPVOID) lpReferencedDomainName = ctypes.create_unicode_buffer("", cchReferencedDomainName.value + 1) success = advapi32.LookupAccountNameW( lpSystemName, lpAccountName, pSid, ctypes.byref(cbSid), lpReferencedDomainName, ctypes.byref(cchReferencedDomainName), ctypes.byref(peUse), ) if not success: raise ctypes.WinError() # type:ignore[attr-defined] return pSid, lpReferencedDomainName.value, peUse.value def AddAccessAllowedAce(pAcl: Any, dwAceRevision: Any, AccessMask: Any, pSid: Any) -> Any: # add an access-allowed access control entry (ACE) # to an access control list (ACL) advapi32.AddAccessAllowedAce(pAcl, dwAceRevision, AccessMask, pSid) def GetFileSecurity(lpFileName: Any, RequestedInformation: Any) -> Any: # return information about the security of a file or directory nLength = wintypes.DWORD(0) try: advapi32.GetFileSecurityW( lpFileName, RequestedInformation, None, 0, ctypes.byref(nLength), ) except OSError as e: if e.winerror != ERROR_INSUFFICIENT_BUFFER: # type:ignore[attr-defined] raise if not nLength.value: return None pSecurityDescriptor = (wintypes.BYTE * nLength.value)() advapi32.GetFileSecurityW( lpFileName, RequestedInformation, pSecurityDescriptor, nLength, ctypes.byref(nLength), ) return pSecurityDescriptor def SetFileSecurity( lpFileName: Any, RequestedInformation: Any, pSecurityDescriptor: Any ) -> Any: # set the security of a file or directory object advapi32.SetFileSecurityW(lpFileName, RequestedInformation, pSecurityDescriptor) def SetSecurityDescriptorDacl( pSecurityDescriptor: Any, bDaclPresent: Any, pDacl: Any, bDaclDefaulted: Any ) -> Any: # set information in a discretionary access control list (DACL) advapi32.SetSecurityDescriptorDacl(pSecurityDescriptor, bDaclPresent, pDacl, bDaclDefaulted) def MakeAbsoluteSD(pSelfRelativeSecurityDescriptor: Any) -> Any: # return a security descriptor in absolute format # by using a security descriptor in self-relative format as a template pAbsoluteSecurityDescriptor = None lpdwAbsoluteSecurityDescriptorSize = wintypes.DWORD(0) pDacl = None lpdwDaclSize = wintypes.DWORD(0) pSacl = None lpdwSaclSize = wintypes.DWORD(0) pOwner = None lpdwOwnerSize = wintypes.DWORD(0) pPrimaryGroup = None lpdwPrimaryGroupSize = wintypes.DWORD(0) try: advapi32.MakeAbsoluteSD( pSelfRelativeSecurityDescriptor, pAbsoluteSecurityDescriptor, ctypes.byref(lpdwAbsoluteSecurityDescriptorSize), pDacl, ctypes.byref(lpdwDaclSize), pSacl, ctypes.byref(lpdwSaclSize), pOwner, ctypes.byref(lpdwOwnerSize), pPrimaryGroup, ctypes.byref(lpdwPrimaryGroupSize), ) except OSError as e: if e.winerror != 
ERROR_INSUFFICIENT_BUFFER: # type:ignore[attr-defined] raise pAbsoluteSecurityDescriptor = (wintypes.BYTE * lpdwAbsoluteSecurityDescriptorSize.value)() pDaclData = (wintypes.BYTE * lpdwDaclSize.value)() pDacl = ctypes.cast(pDaclData, PACL).contents pSaclData = (wintypes.BYTE * lpdwSaclSize.value)() pSacl = ctypes.cast(pSaclData, PACL).contents pOwnerData = (wintypes.BYTE * lpdwOwnerSize.value)() pOwner = ctypes.cast(pOwnerData, PSID) pPrimaryGroupData = (wintypes.BYTE * lpdwPrimaryGroupSize.value)() pPrimaryGroup = ctypes.cast(pPrimaryGroupData, PSID) advapi32.MakeAbsoluteSD( pSelfRelativeSecurityDescriptor, pAbsoluteSecurityDescriptor, ctypes.byref(lpdwAbsoluteSecurityDescriptorSize), pDacl, ctypes.byref(lpdwDaclSize), pSacl, ctypes.byref(lpdwSaclSize), pOwner, lpdwOwnerSize, pPrimaryGroup, ctypes.byref(lpdwPrimaryGroupSize), ) return pAbsoluteSecurityDescriptor def MakeSelfRelativeSD(pAbsoluteSecurityDescriptor: Any) -> Any: # return a security descriptor in self-relative format # by using a security descriptor in absolute format as a template pSelfRelativeSecurityDescriptor = None lpdwBufferLength = wintypes.DWORD(0) try: advapi32.MakeSelfRelativeSD( pAbsoluteSecurityDescriptor, pSelfRelativeSecurityDescriptor, ctypes.byref(lpdwBufferLength), ) except OSError as e: if e.winerror != ERROR_INSUFFICIENT_BUFFER: # type:ignore[attr-defined] raise pSelfRelativeSecurityDescriptor = (wintypes.BYTE * lpdwBufferLength.value)() advapi32.MakeSelfRelativeSD( pAbsoluteSecurityDescriptor, pSelfRelativeSecurityDescriptor, ctypes.byref(lpdwBufferLength), ) return pSelfRelativeSecurityDescriptor def NewAcl() -> Any: # return a new, initialized ACL (access control list) structure nAclLength = 32767 # TODO: calculate this: ctypes.sizeof(ACL) + ? acl_data = ctypes.create_string_buffer(nAclLength) pAcl = ctypes.cast(acl_data, PACL).contents advapi32.InitializeAcl(pAcl, nAclLength, ACL_REVISION) return pAcl SidAdmins = CreateWellKnownSid(WinBuiltinAdministratorsSid) SidUser = LookupAccountName("", GetUserNameEx(NameSamCompatible))[0] Acl = NewAcl() AddAccessAllowedAce(Acl, ACL_REVISION, FILE_ALL_ACCESS, SidAdmins) AddAccessAllowedAce( Acl, ACL_REVISION, FILE_GENERIC_READ | FILE_GENERIC_WRITE | DELETE, SidUser, ) SelfRelativeSD = GetFileSecurity(fname, DACL_SECURITY_INFORMATION) AbsoluteSD = MakeAbsoluteSD(SelfRelativeSD) SetSecurityDescriptorDacl(AbsoluteSD, 1, Acl, 0) SelfRelativeSD = MakeSelfRelativeSD(AbsoluteSD) SetFileSecurity(fname, DACL_SECURITY_INFORMATION, SelfRelativeSD) def get_file_mode(fname: str) -> int: """Retrieves the file mode corresponding to fname in a filesystem-tolerant manner. Parameters ---------- fname : unicode The path to the file to get mode from """ # Some filesystems (e.g., CIFS) auto-enable the execute bit on files. As a result, we # should tolerate the execute bit on the file's owner when validating permissions - thus # the missing least significant bit on the third octal digit. In addition, we also tolerate # the sticky bit being set, so the lsb from the fourth octal digit is also removed. return ( stat.S_IMODE(Path(fname).stat().st_mode) & 0o6677 ) # Use 4 octal digits since S_IMODE does the same allow_insecure_writes = os.getenv("JUPYTER_ALLOW_INSECURE_WRITES", "false").lower() in ("true", "1") @contextmanager def secure_write(fname: str, binary: bool = False) -> Iterator[Any]: """Opens a file in the most restricted pattern available for writing content. This limits the file mode to `0o0600` and yields the resulting opened filed handle. 
Parameters ---------- fname : unicode The path to the file to write binary: boolean Indicates that the file is binary """ mode = "wb" if binary else "w" encoding = None if binary else "utf-8" open_flag = os.O_CREAT | os.O_WRONLY | os.O_TRUNC try: Path(fname).unlink() except OSError: # Skip any issues with the file not existing pass if os.name == "nt": if allow_insecure_writes: # Mounted file systems can have a number of failure modes inside this block. # For windows machines in insecure mode we simply skip this to avoid failures :/ issue_insecure_write_warning() else: # Python on windows does not respect the group and public bits for chmod, so we need # to take additional steps to secure the contents. # Touch file pre-emptively to avoid editing permissions in open files in Windows fd = os.open(fname, open_flag, 0o0600) os.close(fd) open_flag = os.O_WRONLY | os.O_TRUNC win32_restrict_file_to_user(fname) with os.fdopen(os.open(fname, open_flag, 0o0600), mode, encoding=encoding) as f: if os.name != "nt": # Enforce that the file got the requested permissions before writing file_mode = get_file_mode(fname) if file_mode != 0o0600: if allow_insecure_writes: issue_insecure_write_warning() else: msg = ( f"Permissions assignment failed for secure file: '{fname}'." f" Got '{oct(file_mode)}' instead of '0o0600'." ) raise RuntimeError(msg) yield f def issue_insecure_write_warning() -> None: """Issue an insecure write warning.""" def format_warning(msg: str, *args: Any, **kwargs: Any) -> str: # noqa: ARG001 return str(msg) + "\n" warnings.formatwarning = format_warning # type:ignore[assignment] warnings.warn( "WARNING: Insecure writes have been enabled via environment variable " "'JUPYTER_ALLOW_INSECURE_WRITES'! If this is not intended, remove the " "variable or set its value to 'False'.", stacklevel=2, ) jupyter_core-5.7.2/jupyter_core/py.typed000066400000000000000000000000001457404620400204440ustar00rootroot00000000000000jupyter_core-5.7.2/jupyter_core/troubleshoot.py000077500000000000000000000061671457404620400220770ustar00rootroot00000000000000#!/usr/bin/env python """ display environment information that is frequently used to troubleshoot installations of Jupyter or IPython """ from __future__ import annotations import os import platform import subprocess import sys from typing import Any, Optional, Union def subs(cmd: Union[list[str], str]) -> Optional[str]: """ get data from commands that we need to run outside of python """ try: stdout = subprocess.check_output(cmd) # noqa: S603 return stdout.decode("utf-8", "replace").strip() except (OSError, subprocess.CalledProcessError): return None def get_data() -> dict[str, Any]: """ returns a dict of various user environment data """ env: dict[str, Any] = {} env["path"] = os.environ.get("PATH") env["sys_path"] = sys.path env["sys_exe"] = sys.executable env["sys_version"] = sys.version env["platform"] = platform.platform() # FIXME: which on Windows? 
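# Added sketch (assumption/illustration only -- not what get_data() does): a
# cross-platform, first-match lookup could use the standard library instead of
# shelling out:
#
#     import shutil
#     jupyter_on_path = shutil.which("jupyter")  # first "jupyter" found on PATH, or None
#
# The subprocess-based "which -a" / "where" calls below are kept because they list
# *all* matches on PATH, which is more useful when diagnosing shadowed or duplicate
# jupyter installations.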
if sys.platform == "win32": env["where"] = subs(["where", "jupyter"]) env["which"] = None else: env["which"] = subs(["which", "-a", "jupyter"]) env["where"] = None env["pip"] = subs([sys.executable, "-m", "pip", "list"]) env["conda"] = subs(["conda", "list"]) env["conda-env"] = subs(["conda", "env", "export"]) return env def main() -> None: """ print out useful info """ # pylint: disable=superfluous-parens # args = get_args() if "_ARGCOMPLETE" in os.environ: # No arguments to complete, the script can be slow to run to completion, # so in case someone tries to complete jupyter troubleshoot just exit early return environment_data = get_data() print("$PATH:") for directory in environment_data["path"].split(os.pathsep): print(f"\t{directory}") print("\nsys.path:") for directory in environment_data["sys_path"]: print(f"\t{directory}") print("\nsys.executable:") print(f'\t{environment_data["sys_exe"]}') print("\nsys.version:") if "\n" in environment_data["sys_version"]: for data in environment_data["sys_version"].split("\n"): print(f"\t{data}") else: print(f'\t{environment_data["sys_version"]}') print("\nplatform.platform():") print(f'\t{environment_data["platform"]}') if environment_data["which"]: print("\nwhich -a jupyter:") for line in environment_data["which"].split("\n"): print(f"\t{line}") if environment_data["where"]: print("\nwhere jupyter:") for line in environment_data["where"].split("\n"): print(f"\t{line}") if environment_data["pip"]: print("\npip list:") for package in environment_data["pip"].split("\n"): print(f"\t{package}") if environment_data["conda"]: print("\nconda list:") for package in environment_data["conda"].split("\n"): print(f"\t{package}") if environment_data["conda-env"]: print("\nconda env:") for package in environment_data["conda-env"].split("\n"): print(f"\t{package}") if __name__ == "__main__": main() jupyter_core-5.7.2/jupyter_core/utils/000077500000000000000000000000001457404620400201175ustar00rootroot00000000000000jupyter_core-5.7.2/jupyter_core/utils/__init__.py000066400000000000000000000157451457404620400222440ustar00rootroot00000000000000# Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. from __future__ import annotations import asyncio import atexit import errno import inspect import sys import threading import warnings from contextvars import ContextVar from pathlib import Path from types import FrameType from typing import Any, Awaitable, Callable, TypeVar, cast def ensure_dir_exists(path: str | Path, mode: int = 0o777) -> None: """Ensure that a directory exists If it doesn't exist, try to create it, protecting against a race condition if another process is doing the same. The default permissions are determined by the current umask. """ try: Path(path).mkdir(parents=True, mode=mode) except OSError as e: if e.errno != errno.EEXIST: raise if not Path(path).is_dir(): raise OSError("%r exists but is not a directory" % path) def _get_frame(level: int) -> FrameType | None: """Get the frame at the given stack level.""" # sys._getframe is much faster than inspect.stack, but isn't guaranteed to # exist in all python implementations, so we fall back to inspect.stack() # We need to add one to level to account for this get_frame call. 
if hasattr(sys, "_getframe"): frame = sys._getframe(level + 1) else: frame = inspect.stack(context=0)[level + 1].frame return frame # This function is from https://github.com/python/cpython/issues/67998 # (https://bugs.python.org/file39550/deprecated_module_stacklevel.diff) and # calculates the appropriate stacklevel for deprecations to target the # deprecation for the caller, no matter how many internal stack frames we have # added in the process. For example, with the deprecation warning in the # __init__ below, the appropriate stacklevel will change depending on how deep # the inheritance hierarchy is. def _external_stacklevel(internal: list[str]) -> int: """Find the stacklevel of the first frame that doesn't contain any of the given internal strings The depth will be 1 at minimum in order to start checking at the caller of the function that called this utility method. """ # Get the level of my caller's caller level = 2 frame = _get_frame(level) # Normalize the path separators: normalized_internal = [str(Path(s)) for s in internal] # climb the stack frames while we see internal frames while frame and any(s in str(Path(frame.f_code.co_filename)) for s in normalized_internal): level += 1 frame = frame.f_back # Return the stack level from the perspective of whoever called us (i.e., one level up) return level - 1 def deprecation(message: str, internal: str | list[str] = "jupyter_core/") -> None: """Generate a deprecation warning targeting the first frame that is not 'internal' internal is a string or list of strings, which if they appear in filenames in the frames, the frames will be considered internal. Changing this can be useful if, for example, we know that our internal code is calling out to another library. """ _internal: list[str] _internal = [internal] if isinstance(internal, str) else internal # stack level of the first external frame from here stacklevel = _external_stacklevel(_internal) # The call to .warn adds one frame, so bump the stacklevel up by one warnings.warn(message, DeprecationWarning, stacklevel=stacklevel + 1) T = TypeVar("T") class _TaskRunner: """A task runner that runs an asyncio event loop on a background thread.""" def __init__(self) -> None: self.__io_loop: asyncio.AbstractEventLoop | None = None self.__runner_thread: threading.Thread | None = None self.__lock = threading.Lock() atexit.register(self._close) def _close(self) -> None: if self.__io_loop: self.__io_loop.stop() def _runner(self) -> None: loop = self.__io_loop assert loop is not None try: loop.run_forever() finally: loop.close() def run(self, coro: Any) -> Any: """Synchronously run a coroutine on a background thread.""" with self.__lock: name = f"{threading.current_thread().name} - runner" if self.__io_loop is None: self.__io_loop = asyncio.new_event_loop() self.__runner_thread = threading.Thread(target=self._runner, daemon=True, name=name) self.__runner_thread.start() fut = asyncio.run_coroutine_threadsafe(coro, self.__io_loop) return fut.result(None) _runner_map: dict[str, _TaskRunner] = {} _loop: ContextVar[asyncio.AbstractEventLoop | None] = ContextVar("_loop", default=None) def run_sync(coro: Callable[..., Awaitable[T]]) -> Callable[..., T]: """Wraps coroutine in a function that blocks until it has executed. Parameters ---------- coro : coroutine-function The coroutine-function to be executed. Returns ------- result : Whatever the coroutine-function returns. 
""" if not inspect.iscoroutinefunction(coro): raise AssertionError def wrapped(*args: Any, **kwargs: Any) -> Any: name = threading.current_thread().name inner = coro(*args, **kwargs) try: # If a loop is currently running in this thread, # use a task runner. asyncio.get_running_loop() if name not in _runner_map: _runner_map[name] = _TaskRunner() return _runner_map[name].run(inner) except RuntimeError: pass # Run the loop for this thread. loop = ensure_event_loop() return loop.run_until_complete(inner) wrapped.__doc__ = coro.__doc__ return wrapped def ensure_event_loop(prefer_selector_loop: bool = False) -> asyncio.AbstractEventLoop: # Get the loop for this thread, or create a new one. loop = _loop.get() if loop is not None and not loop.is_closed(): return loop try: loop = asyncio.get_running_loop() except RuntimeError: if sys.platform == "win32" and prefer_selector_loop: loop = asyncio.WindowsSelectorEventLoopPolicy().new_event_loop() else: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) _loop.set(loop) return loop async def ensure_async(obj: Awaitable[T] | T) -> T: """Convert a non-awaitable object to a coroutine if needed, and await it if it was not already awaited. This function is meant to be called on the result of calling a function, when that function could either be asynchronous or not. """ if inspect.isawaitable(obj): obj = cast(Awaitable[T], obj) try: result = await obj except RuntimeError as e: if str(e) == "cannot reuse already awaited coroutine": # obj is already the coroutine's result return cast(T, obj) raise return result # obj doesn't need to be awaited return cast(T, obj) jupyter_core-5.7.2/jupyter_core/version.py000066400000000000000000000010321457404620400210120ustar00rootroot00000000000000""" store the current version info of the jupyter_core. """ from __future__ import annotations import re # Version string must appear intact for hatch versioning __version__ = "5.7.2" # Build up version_info tuple for backwards compatibility pattern = r"(?P\d+).(?P\d+).(?P\d+)(?P.*)" match = re.match(pattern, __version__) assert match is not None parts: list[object] = [int(match[part]) for part in ["major", "minor", "patch"]] if match["rest"]: parts.append(match["rest"]) version_info = tuple(parts) jupyter_core-5.7.2/pyproject.toml000066400000000000000000000145351457404620400171710ustar00rootroot00000000000000[build-system] requires = ["hatchling>=1.4"] build-backend = "hatchling.build" [project] name = "jupyter_core" description = "Jupyter core package. A base package on which Jupyter projects rely." license = { file = "LICENSE" } classifiers = [ "Framework :: Jupyter", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "Intended Audience :: Science/Research", "License :: OSI Approved :: BSD License", "Programming Language :: Python", "Programming Language :: Python :: 3" ] requires-python = ">=3.8" dependencies = [ "platformdirs>=2.5", "traitlets>=5.3", "pywin32>=300 ; sys_platform == 'win32' and platform_python_implementation != 'PyPy'" ] dynamic = ["version"] [[project.authors]] name = "Jupyter Development Team" email = "jupyter@googlegroups.org" [project.readme] text = "There is no reason to install this package on its own." 
content-type = "text/plain" [project.urls] Homepage = "https://jupyter.org" Documentation = "https://jupyter-core.readthedocs.io/" Funding = "https://numfocus.org/" Source = "https://github.com/jupyter/jupyter_core" Tracker = "https://github.com/jupyter/jupyter_core/issues" [project.optional-dependencies] test = [ "ipykernel", "pre-commit", "pytest<8", "pytest-cov", "pytest-timeout" ] docs = [ "myst-parser", "sphinxcontrib_github_alt", "sphinxcontrib_spelling", "sphinx-autodoc-typehints", "traitlets", "pydata_sphinx_theme", ] [project.scripts] jupyter = "jupyter_core.command:main" jupyter-migrate = "jupyter_core.migrate:main" jupyter-troubleshoot = "jupyter_core.troubleshoot:main" [tool.hatch.version] path = "jupyter_core/version.py" [tool.hatch.build.force-include] "./jupyter.py" = "jupyter.py" [tool.hatch.envs.docs] features = ["docs"] [tool.hatch.envs.docs.env-vars] SPHINXOPTS = "-W -n" [tool.hatch.envs.docs.scripts] build = "make -C docs html" api = "sphinx-apidoc -o docs/api -f -E jupyter_core jupyter_core/tests/*" [tool.hatch.envs.test] features = ["test"] [tool.hatch.envs.test.scripts] test = "python -m pytest -vv {args}" nowarn = "test -W default {args}" [tool.hatch.envs.cov] features = ["test"] dependencies = ["coverage", "pytest-cov"] [tool.hatch.envs.cov.scripts] test = "python -m pytest -vv --cov jupyter_core --cov-branch --cov-report term-missing:skip-covered {args}" nowarn = "test -W default {args}" [tool.hatch.envs.typing] dependencies = ["pre-commit"] detached = true [tool.hatch.envs.typing.scripts] test = "pre-commit run --all-files --hook-stage manual mypy" [tool.hatch.envs.lint] dependencies = ["pre-commit"] detached = true [tool.hatch.envs.lint.scripts] build = [ "pre-commit run --all-files ruff", "pre-commit run --all-files ruff-format" ] [tool.mypy] files = "jupyter_core" python_version = "3.8" strict = true enable_error_code = ["ignore-without-code", "redundant-expr", "truthy-bool"] warn_unreachable = true [tool.pytest.ini_options] minversion = "7.0" xfail_strict = true log_cli_level = "info" addopts = [ "-raXs", "--durations=10", "--color=yes", "--doctest-modules", "--showlocals", "--strict-markers", "--strict-config", "--ignore-glob=tests/dotipython*" ] testpaths = [ "tests/" ] filterwarnings= [ # Fail on warnings "error", # Expected internal warnings "module:Jupyter is migrating its paths to use standard platformdirs:DeprecationWarning", ] [tool.coverage.run] relative_files = true source = ["jupyter_core"] [tool.coverage.report] exclude_lines = [ "pragma: no cover", "def __repr__", "if self.debug:", "if settings.DEBUG", "raise AssertionError", "raise NotImplementedError", "if 0:", "if __name__ == .__main__.:", "class .*\bProtocol\\):", "@(abc\\.)?abstractmethod", ] [tool.ruff] line-length = 100 [tool.ruff.lint] extend-select = [ "B", # flake8-bugbear "I", # isort "ARG", # flake8-unused-arguments "C4", # flake8-comprehensions "EM", # flake8-errmsg "ICN", # flake8-import-conventions "G", # flake8-logging-format "PGH", # pygrep-hooks "PIE", # flake8-pie "PL", # pylint "PT", # flake8-pytest-style "PTH", # flake8-use-pathlib "RET", # flake8-return "RUF", # Ruff-specific "SIM", # flake8-simplify "T20", # flake8-print "UP", # pyupgrade "YTT", # flake8-2020 "EXE", # flake8-executable "NPY", # NumPy specific rules "PD", # pandas-vet "PYI", # flake8-pyi "S", # flake8-bandit ] ignore = [ "PLR", # Design related pylint codes "Q000", # Single quotes found but double quotes preferred "E501", # Line too long (158 > 100 characters) "UP007", # Use `X | Y` for type annotations" 
"SIM105", # Use `contextlib.suppress(...)` "S101", # Use of assert "RUF012" # Mutable class attributes should be annotated ] unfixable = [ "T201", # Don't touch print statements "RUF100", # Don't touch noqa lines "F401" # Unused imports ] isort.required-imports = ["from __future__ import annotations"] [tool.ruff.lint.per-file-ignores] # B011 Do not call assert False since python -O removes these calls # F841 local variable 'foo' is assigned to but never used # C408 Unnecessary `dict` call # E402 Module level import not at top of file # T201 `print` found # B007 Loop control variable `i` not used within the loop body. # N802 Function name `assertIn` should be lowercase # PLR2004 Magic value used in comparison, consider replacing b'WITNESS A' with a constant variable # S603 `subprocess` call: check for execution of untrusted input "tests/*" = ["B011", "F841", "C408", "E402", "T201", "B007", "N802", "S", "PTH", "ARG0"] # F821 Undefined name `get_config` "tests/**/profile_default/*_config.py" = ["F821"] # T201 `print` found "jupyter_core/application.py" = ["T201"] "jupyter_core/command.py" = ["T201"] "jupyter_core/troubleshoot.py" = ["T201"] # N802 Function name `SetFileSecurity` should be lowercase "jupyter_core/paths.py" = ["N802", "N803", "N806"] # C901 Function is too complex "jupyter_core/migrate.py" = ["C901"] # `migrate_static_custom` is too complex (11 > 10) [tool.interrogate] ignore-init-module=true ignore-private=true ignore-semiprivate=true ignore-property-decorators=true ignore-nested-functions=true ignore-nested-classes=true fail-under=100 exclude = ["docs", "tests"] [tool.check-wheel-contents] toplevel = ["jupyter_core/", "jupyter.py"] ignore = ["W002"] jupyter_core-5.7.2/scripts/000077500000000000000000000000001457404620400157345ustar00rootroot00000000000000jupyter_core-5.7.2/scripts/jupyter000077500000000000000000000002551457404620400173660ustar00rootroot00000000000000#!/usr/bin/env python """Launch the root jupyter command""" from __future__ import annotations from jupyter_core.command import main if __name__ == "__main__": main() jupyter_core-5.7.2/scripts/jupyter-migrate000077500000000000000000000003171457404620400210130ustar00rootroot00000000000000#!/usr/bin/env python # PYTHON_ARGCOMPLETE_OK """Migrate Jupyter config from IPython < 4.0""" from __future__ import annotations from jupyter_core.migrate import main if __name__ == "__main__": main() jupyter_core-5.7.2/tests/000077500000000000000000000000001457404620400154075ustar00rootroot00000000000000jupyter_core-5.7.2/tests/__init__.py000066400000000000000000000000001457404620400175060ustar00rootroot00000000000000jupyter_core-5.7.2/tests/dotipython/000077500000000000000000000000001457404620400176105ustar00rootroot00000000000000jupyter_core-5.7.2/tests/dotipython/nbextensions/000077500000000000000000000000001457404620400223275ustar00rootroot00000000000000jupyter_core-5.7.2/tests/dotipython/nbextensions/myext.js000066400000000000000000000000131457404620400240250ustar00rootroot00000000000000var hello; jupyter_core-5.7.2/tests/dotipython/profile_default/000077500000000000000000000000001457404620400227545ustar00rootroot00000000000000jupyter_core-5.7.2/tests/dotipython/profile_default/ipython_config.py000066400000000000000000000502061457404620400263500ustar00rootroot00000000000000# Configuration file for ipython. 
from __future__ import annotations c = get_config() # ------------------------------------------------------------------------------ # InteractiveShellApp configuration # ------------------------------------------------------------------------------ # A Mixin for applications that start InteractiveShell instances. # # Provides configurables for loading extensions and executing files as part of # configuring a Shell environment. # # The following methods should be called by the :meth:`initialize` method of the # subclass: # # - :meth:`init_path` # - :meth:`init_shell` (to be implemented by the subclass) # - :meth:`init_gui_pylab` # - :meth:`init_extensions` # - :meth:`init_code` # lines of code to run at IPython startup. # c.InteractiveShellApp.exec_lines = [] # Should variables loaded at startup (by startup files, exec_lines, etc.) be # hidden from tools like %who? # c.InteractiveShellApp.hide_initial_ns = True # A list of dotted module names of IPython extensions to load. # c.InteractiveShellApp.extensions = [] # Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'osx', # 'pyglet', 'qt', 'qt5', 'tk', 'wx'). # c.InteractiveShellApp.gui = None # A file to be run # c.InteractiveShellApp.file_to_run = '' # Configure matplotlib for interactive use with the default matplotlib backend. # c.InteractiveShellApp.matplotlib = None # Reraise exceptions encountered loading IPython extensions? # c.InteractiveShellApp.reraise_ipython_extension_failures = False # Run the file referenced by the PYTHONSTARTUP environment variable at IPython # startup. # c.InteractiveShellApp.exec_PYTHONSTARTUP = True # Pre-load matplotlib and numpy for interactive use, selecting a particular # matplotlib backend and loop integration. # c.InteractiveShellApp.pylab = None # Run the module as a script. # c.InteractiveShellApp.module_to_run = '' # dotted module name of an IPython extension to load. # c.InteractiveShellApp.extra_extension = '' # List of files to run at IPython startup. # c.InteractiveShellApp.exec_files = [] # If true, IPython will populate the user namespace with numpy, pylab, etc. and # an ``import *`` is done from numpy and pylab, when using pylab mode. # # When False, pylab mode should not import any names into the user namespace. # c.InteractiveShellApp.pylab_import_all = True # Execute the given command string. # c.InteractiveShellApp.code_to_run = '' # ------------------------------------------------------------------------------ # TerminalIPythonApp configuration # ------------------------------------------------------------------------------ # TerminalIPythonApp will inherit config from: BaseIPythonApplication, # Application, InteractiveShellApp # Should variables loaded at startup (by startup files, exec_lines, etc.) be # hidden from tools like %who? # c.TerminalIPythonApp.hide_initial_ns = True # A list of dotted module names of IPython extensions to load. # c.TerminalIPythonApp.extensions = [] # Execute the given command string. # c.TerminalIPythonApp.code_to_run = '' # The date format used by logging formatters for %(asctime)s # c.TerminalIPythonApp.log_datefmt = '%Y-%m-%d %H:%M:%S' # Reraise exceptions encountered loading IPython extensions? # c.TerminalIPythonApp.reraise_ipython_extension_failures = False # Set the log level by value or name. # c.TerminalIPythonApp.log_level = 30 # Run the file referenced by the PYTHONSTARTUP environment variable at IPython # startup. 
# c.TerminalIPythonApp.exec_PYTHONSTARTUP = True # Pre-load matplotlib and numpy for interactive use, selecting a particular # matplotlib backend and loop integration. # c.TerminalIPythonApp.pylab = None # Run the module as a script. # c.TerminalIPythonApp.module_to_run = '' # Whether to display a banner upon starting IPython. # c.TerminalIPythonApp.display_banner = True # dotted module name of an IPython extension to load. # c.TerminalIPythonApp.extra_extension = '' # Create a massive crash report when IPython encounters what may be an internal # error. The default is to append a short message to the usual traceback # c.TerminalIPythonApp.verbose_crash = False # Whether to overwrite existing config files when copying # c.TerminalIPythonApp.overwrite = False # The IPython profile to use. # c.TerminalIPythonApp.profile = 'default' # If a command or file is given via the command-line, e.g. 'ipython foo.py', # start an interactive shell after executing the file or command. # c.TerminalIPythonApp.force_interact = False # List of files to run at IPython startup. # c.TerminalIPythonApp.exec_files = [] # Start IPython quickly by skipping the loading of config files. # c.TerminalIPythonApp.quick = False # The Logging format template # c.TerminalIPythonApp.log_format = '[%(name)s]%(highlevel)s %(message)s' # Whether to install the default config files into the profile dir. If a new # profile is being created, and IPython contains config files for that profile, # then they will be staged into the new directory. Otherwise, default config # files will be automatically generated. # c.TerminalIPythonApp.copy_config_files = False # Path to an extra config file to load. # # If specified, load this config file in addition to any other IPython config. # c.TerminalIPythonApp.extra_config_file = '' # lines of code to run at IPython startup. # c.TerminalIPythonApp.exec_lines = [] # Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'osx', # 'pyglet', 'qt', 'qt5', 'tk', 'wx'). # c.TerminalIPythonApp.gui = None # A file to be run # c.TerminalIPythonApp.file_to_run = '' # Configure matplotlib for interactive use with the default matplotlib backend. # c.TerminalIPythonApp.matplotlib = None # Suppress warning messages about legacy config files # c.TerminalIPythonApp.ignore_old_config = False # The name of the IPython directory. This directory is used for logging # configuration (through profiles), history storage, etc. The default is usually # $HOME/.ipython. This option can also be specified through the environment # variable IPYTHONDIR. # c.TerminalIPythonApp.ipython_dir = '' # If true, IPython will populate the user namespace with numpy, pylab, etc. and # an ``import *`` is done from numpy and pylab, when using pylab mode. # # When False, pylab mode should not import any names into the user namespace. # c.TerminalIPythonApp.pylab_import_all = True # ------------------------------------------------------------------------------ # TerminalInteractiveShell configuration # ------------------------------------------------------------------------------ # TerminalInteractiveShell will inherit config from: InteractiveShell # # c.TerminalInteractiveShell.object_info_string_level = 0 # # c.TerminalInteractiveShell.separate_out = '' # Automatically call the pdb debugger after every exception. 
# c.TerminalInteractiveShell.pdb = False # # c.TerminalInteractiveShell.ipython_dir = '' # # c.TerminalInteractiveShell.history_length = 10000 # # c.TerminalInteractiveShell.readline_remove_delims = '-/~' # auto editing of files with syntax errors. # c.TerminalInteractiveShell.autoedit_syntax = False # If True, anything that would be passed to the pager will be displayed as # regular output instead. # c.TerminalInteractiveShell.display_page = False # # c.TerminalInteractiveShell.debug = False # # c.TerminalInteractiveShell.separate_in = '\n' # Start logging to the default log file in overwrite mode. Use `logappend` to # specify a log file to **append** logs to. # c.TerminalInteractiveShell.logstart = False # Set the size of the output cache. The default is 1000, you can change it # permanently in your config file. Setting it to 0 completely disables the # caching system, and the minimum value accepted is 20 (if you provide a value # less than 20, it is reset to 0 and a warning is issued). This limit is # defined because otherwise you'll spend more time re-flushing a too small cache # than working # c.TerminalInteractiveShell.cache_size = 1000 # Set to confirm when you try to exit IPython with an EOF (Control-D in Unix, # Control-Z/Enter in Windows). By typing 'exit' or 'quit', you can force a # direct exit without any confirmation. # c.TerminalInteractiveShell.confirm_exit = True # The shell program to be used for paging. # c.TerminalInteractiveShell.pager = 'less' # # c.TerminalInteractiveShell.wildcards_case_sensitive = True # Deprecated, use PromptManager.justify # c.TerminalInteractiveShell.prompts_pad_left = True # The name of the logfile to use. # c.TerminalInteractiveShell.logfile = '' # 'all', 'last', 'last_expr' or 'none', specifying which nodes should be run # interactively (displaying output from expressions). # c.TerminalInteractiveShell.ast_node_interactivity = 'last_expr' # # c.TerminalInteractiveShell.quiet = False # Save multi-line entries as one entry in readline history # c.TerminalInteractiveShell.multiline_history = True # Deprecated, use PromptManager.in_template # c.TerminalInteractiveShell.prompt_in1 = 'In [\\#]: ' # # c.TerminalInteractiveShell.readline_use = True # Enable magic commands to be called without the leading %. # c.TerminalInteractiveShell.automagic = True # The part of the banner to be printed before the profile # c.TerminalInteractiveShell.banner1 = 'Python 3.4.3 |Continuum Analytics, Inc.| (default, Mar 6 2015, 12:07:41) \nType "copyright", "credits" or "license" for more information.\n\nIPython 3.1.0 -- An enhanced Interactive Python.\nAnaconda is brought to you by Continuum Analytics.\nPlease check out: http://continuum.io/thanks and https://binstar.org\n? -> Introduction and overview of IPython\'s features.\n%quickref -> Quick reference.\nhelp -> Python\'s own help system.\nobject? -> Details about \'object\', use \'object??\' for extra details.\n' # Make IPython automatically call any callable object even if you didn't type # explicit parentheses. For example, 'str 43' becomes 'str(43)' automatically. # The value can be '0' to disable the feature, '1' for 'smart' autocall, where # it is not applied if there are no more arguments on the line, and '2' for # 'full' autocall, where all callable objects are automatically called (even if # no arguments are present). # c.TerminalInteractiveShell.autocall = 0 # Autoindent IPython code entered interactively. # c.TerminalInteractiveShell.autoindent = True # Set the color scheme (NoColor, Linux, or LightBG). 
# c.TerminalInteractiveShell.colors = 'LightBG' # Set the editor used by IPython (default to $EDITOR/vi/notepad). # c.TerminalInteractiveShell.editor = 'mate -w' # Use colors for displaying information about objects. Because this information # is passed through a pager (like 'less'), and some pagers get confused with # color codes, this capability can be turned off. # c.TerminalInteractiveShell.color_info = True # # c.TerminalInteractiveShell.readline_parse_and_bind = ['tab: complete', '"\\C-l": clear-screen', 'set show-all-if-ambiguous on', '"\\C-o": tab-insert', '"\\C-r": reverse-search-history', '"\\C-s": forward-search-history', '"\\C-p": history-search-backward', '"\\C-n": history-search-forward', '"\\e[A": history-search-backward', '"\\e[B": history-search-forward', '"\\C-k": kill-line', '"\\C-u": unix-line-discard'] # Deprecated, use PromptManager.in2_template # c.TerminalInteractiveShell.prompt_in2 = ' .\\D.: ' # # c.TerminalInteractiveShell.separate_out2 = '' # The part of the banner to be printed after the profile # c.TerminalInteractiveShell.banner2 = '' # Start logging to the given file in append mode. Use `logfile` to specify a log # file to **overwrite** logs to. # c.TerminalInteractiveShell.logappend = '' # Don't call post-execute functions that have failed in the past. # c.TerminalInteractiveShell.disable_failing_post_execute = False # Deprecated, use PromptManager.out_template # c.TerminalInteractiveShell.prompt_out = 'Out[\\#]: ' # Enable deep (recursive) reloading by default. IPython can use the deep_reload # module which reloads changes in modules recursively (it replaces the reload() # function, so you don't need to change anything to use it). deep_reload() # forces a full reload of modules whose code may have changed, which the default # reload() function does not. When deep_reload is off, IPython will use the # normal reload(), but deep_reload will still be available as dreload(). # c.TerminalInteractiveShell.deep_reload = False # # c.TerminalInteractiveShell.xmode = 'Context' # Show rewritten input, e.g. for autocall. # c.TerminalInteractiveShell.show_rewritten_input = True # Number of lines of your screen, used to control printing of very long strings. # Strings longer than this number of lines will be sent through a pager instead # of directly printed. The default value for this is 0, which means IPython # will auto-detect your screen size every time it needs to print certain # potentially long strings (this doesn't change the behavior of the 'print' # keyword, it's only triggered internally). If for some reason this isn't # working well (it needs curses support), specify it yourself. Otherwise don't # change the default. # c.TerminalInteractiveShell.screen_length = 0 # A list of ast.NodeTransformer subclass instances, which will be applied to # user input before code is run. # c.TerminalInteractiveShell.ast_transformers = [] # Enable auto setting the terminal title. # c.TerminalInteractiveShell.term_title = False # ------------------------------------------------------------------------------ # PromptManager configuration # ------------------------------------------------------------------------------ # This is the primary interface for producing IPython's prompts. # # c.PromptManager.color_scheme = 'Linux' # Continuation prompt. # c.PromptManager.in2_template = ' .\\D.: ' # Input prompt. '\#' will be transformed to the prompt number # c.PromptManager.in_template = 'In [\\#]: ' # Output prompt. 
'\#' will be transformed to the prompt number # c.PromptManager.out_template = 'Out[\\#]: ' # If True (default), each prompt will be right-aligned with the preceding one. # c.PromptManager.justify = True # ------------------------------------------------------------------------------ # HistoryManager configuration # ------------------------------------------------------------------------------ # A class to organize all history-related functionality in one place. # HistoryManager will inherit config from: HistoryAccessor # Options for configuring the SQLite connection # # These options are passed as keyword args to sqlite3.connect when establishing # database connections. # c.HistoryManager.connection_options = {} # Should the history database include output? (default: no) # c.HistoryManager.db_log_output = False # enable the SQLite history # # set enabled=False to disable the SQLite history, in which case there will be # no stored history, no SQLite connection, and no background saving thread. # This may be necessary in some threaded environments where IPython is embedded. # c.HistoryManager.enabled = True # Path to file to use for SQLite history database. # # By default, IPython will put the history database in the IPython profile # directory. If you would rather share one history among profiles, you can set # this value in each, so that they are consistent. # # Due to an issue with fcntl, SQLite is known to misbehave on some NFS mounts. # If you see IPython hanging, try setting this to something on a local disk, # e.g:: # # ipython --HistoryManager.hist_file=/tmp/ipython_hist.sqlite # c.HistoryManager.hist_file = '' # Write to database every x commands (higher values save disk access & power). # Values of 1 or less effectively disable caching. # c.HistoryManager.db_cache_size = 0 # ------------------------------------------------------------------------------ # ProfileDir configuration # ------------------------------------------------------------------------------ # An object to manage the profile directory and its resources. # # The profile directory is used by all IPython applications, to manage # configuration, logging and security. # # This object knows how to find, create and manage these directories. This # should be used by any code that wants to handle profiles. # Set the profile location directly. This overrides the logic used by the # `profile` option. # c.ProfileDir.location = '' # ------------------------------------------------------------------------------ # PlainTextFormatter configuration # ------------------------------------------------------------------------------ # The default pretty-printer. # # This uses :mod:`IPython.lib.pretty` to compute the format data of the object. # If the object cannot be pretty printed, :func:`repr` is used. See the # documentation of :mod:`IPython.lib.pretty` for details on how to write pretty # printers. 
Here is a simple example:: # # def dtype_pprinter(obj, p, cycle): # if cycle: # return p.text('dtype(...)') # if hasattr(obj, 'fields'): # if obj.fields is None: # p.text(repr(obj)) # else: # p.begin_group(7, 'dtype([') # for i, field in enumerate(obj.descr): # if i > 0: # p.text(',') # p.breakable() # p.pretty(field) # p.end_group(7, '])') # PlainTextFormatter will inherit config from: BaseFormatter # # c.PlainTextFormatter.newline = '\n' # # c.PlainTextFormatter.max_width = 79 # # c.PlainTextFormatter.verbose = False # # c.PlainTextFormatter.pprint = True # # c.PlainTextFormatter.singleton_printers = {} # # c.PlainTextFormatter.type_printers = {} # Truncate large collections (lists, dicts, tuples, sets) to this size. # # Set to 0 to disable truncation. # c.PlainTextFormatter.max_seq_length = 1000 # # c.PlainTextFormatter.deferred_printers = {} # # c.PlainTextFormatter.float_precision = '' # ------------------------------------------------------------------------------ # IPCompleter configuration # ------------------------------------------------------------------------------ # Extension of the completer class with IPython-specific features # IPCompleter will inherit config from: Completer # Whether to merge completion results into a single list # # If False, only the completion results from the first non-empty completer will # be returned. # c.IPCompleter.merge_completions = True # Activate greedy completion # # This will enable completion on elements of lists, results of function calls, # etc., but can be unsafe because the code is actually evaluated on TAB. # c.IPCompleter.greedy = False # Instruct the completer to use __all__ for the completion # # Specifically, when completing on ``object.``. # # When True: only those names in obj.__all__ will be included. # # When False [default]: the __all__ attribute is ignored # c.IPCompleter.limit_to__all__ = False # Instruct the completer to omit private method names # # Specifically, when completing on ``object.``. # # When 2 [default]: all names that start with '_' will be excluded. # # When 1: all 'magic' names (``__foo__``) will be excluded. # # When 0: nothing will be excluded. # c.IPCompleter.omit__names = 2 # ------------------------------------------------------------------------------ # ScriptMagics configuration # ------------------------------------------------------------------------------ # Magics for talking to scripts # # This defines a base `%%script` cell magic for running a cell with a program in # a subprocess, and registers a few top-level magics that call %%script with # common interpreters. # Extra script cell magics to define # # This generates simple wrappers of `%%script foo` as `%%foo`. # # If you want to add script magics that aren't on your path, specify them in # script_paths # c.ScriptMagics.script_magics = [] # Dict mapping short 'ruby' names to full paths, such as '/opt/secret/bin/ruby' # # Only necessary for items in script_magics where the default path will not find # the right interpreter. # c.ScriptMagics.script_paths = {} # ------------------------------------------------------------------------------ # StoreMagics configuration # ------------------------------------------------------------------------------ # Lightweight persistence for python variables. # # Provides the %store magic. # If True, any %store-d variables will be automatically restored when IPython # starts. 
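# A hedged sketch of the typical workflow (the variable name is arbitrary): run
# '%store results' in one session, enable this option, and 'results' reappears in the
# user namespace the next time IPython starts:
#
# c.StoreMagics.autorestore = True
#
# The commented default below leaves automatic restoring off.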
# c.StoreMagics.autorestore = False jupyter_core-5.7.2/tests/dotipython/profile_default/ipython_console_config.py000066400000000000000000000523071457404620400300760ustar00rootroot00000000000000# Configuration file for ipython-console. from __future__ import annotations c = get_config() # ------------------------------------------------------------------------------ # ZMQTerminalIPythonApp configuration # ------------------------------------------------------------------------------ # ZMQTerminalIPythonApp will inherit config from: TerminalIPythonApp, # BaseIPythonApplication, Application, InteractiveShellApp, IPythonConsoleApp, # ConnectionFileMixin # Should variables loaded at startup (by startup files, exec_lines, etc.) be # hidden from tools like %who? # c.ZMQTerminalIPythonApp.hide_initial_ns = True # set the heartbeat port [default: random] # c.ZMQTerminalIPythonApp.hb_port = 0 # A list of dotted module names of IPython extensions to load. # c.ZMQTerminalIPythonApp.extensions = [] # Execute the given command string. # c.ZMQTerminalIPythonApp.code_to_run = '' # Path to the ssh key to use for logging in to the ssh server. # c.ZMQTerminalIPythonApp.sshkey = '' # The date format used by logging formatters for %(asctime)s # c.ZMQTerminalIPythonApp.log_datefmt = '%Y-%m-%d %H:%M:%S' # set the control (ROUTER) port [default: random] # c.ZMQTerminalIPythonApp.control_port = 0 # Reraise exceptions encountered loading IPython extensions? # c.ZMQTerminalIPythonApp.reraise_ipython_extension_failures = False # Set the log level by value or name. # c.ZMQTerminalIPythonApp.log_level = 30 # Run the file referenced by the PYTHONSTARTUP environment variable at IPython # startup. # c.ZMQTerminalIPythonApp.exec_PYTHONSTARTUP = True # Pre-load matplotlib and numpy for interactive use, selecting a particular # matplotlib backend and loop integration. # c.ZMQTerminalIPythonApp.pylab = None # Run the module as a script. # c.ZMQTerminalIPythonApp.module_to_run = '' # Whether to display a banner upon starting IPython. # c.ZMQTerminalIPythonApp.display_banner = True # dotted module name of an IPython extension to load. # c.ZMQTerminalIPythonApp.extra_extension = '' # Create a massive crash report when IPython encounters what may be an internal # error. The default is to append a short message to the usual traceback # c.ZMQTerminalIPythonApp.verbose_crash = False # Whether to overwrite existing config files when copying # c.ZMQTerminalIPythonApp.overwrite = False # The IPython profile to use. # c.ZMQTerminalIPythonApp.profile = 'default' # If a command or file is given via the command-line, e.g. 'ipython foo.py', # start an interactive shell after executing the file or command. # c.ZMQTerminalIPythonApp.force_interact = False # List of files to run at IPython startup. # c.ZMQTerminalIPythonApp.exec_files = [] # Start IPython quickly by skipping the loading of config files. # c.ZMQTerminalIPythonApp.quick = False # The Logging format template # c.ZMQTerminalIPythonApp.log_format = '[%(name)s]%(highlevel)s %(message)s' # Whether to install the default config files into the profile dir. If a new # profile is being created, and IPython contains config files for that profile, # then they will be staged into the new directory. Otherwise, default config # files will be automatically generated. # c.ZMQTerminalIPythonApp.copy_config_files = False # set the stdin (ROUTER) port [default: random] # c.ZMQTerminalIPythonApp.stdin_port = 0 # Path to an extra config file to load. 
# # If specified, load this config file in addition to any other IPython config. # c.ZMQTerminalIPythonApp.extra_config_file = '' # lines of code to run at IPython startup. # c.ZMQTerminalIPythonApp.exec_lines = [] # Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'osx', # 'pyglet', 'qt', 'qt5', 'tk', 'wx'). # c.ZMQTerminalIPythonApp.gui = None # A file to be run # c.ZMQTerminalIPythonApp.file_to_run = '' # Configure matplotlib for interactive use with the default matplotlib backend. # c.ZMQTerminalIPythonApp.matplotlib = None # Suppress warning messages about legacy config files # c.ZMQTerminalIPythonApp.ignore_old_config = False # set the iopub (PUB) port [default: random] # c.ZMQTerminalIPythonApp.iopub_port = 0 # # c.ZMQTerminalIPythonApp.transport = 'tcp' # JSON file in which to store connection info [default: kernel-.json] # # This file will contain the IP, ports, and authentication key needed to connect # clients to this kernel. By default, this file will be created in the security # dir of the current profile, but can be specified by absolute path. # c.ZMQTerminalIPythonApp.connection_file = '' # The name of the IPython directory. This directory is used for logging # configuration (through profiles), history storage, etc. The default is usually # $HOME/.ipython. This option can also be specified through the environment # variable IPYTHONDIR. # c.ZMQTerminalIPythonApp.ipython_dir = '' # The SSH server to use to connect to the kernel. # c.ZMQTerminalIPythonApp.sshserver = '' # Set to display confirmation dialog on exit. You can always use 'exit' or # 'quit', to force a direct exit without any confirmation. # c.ZMQTerminalIPythonApp.confirm_exit = True # set the shell (ROUTER) port [default: random] # c.ZMQTerminalIPythonApp.shell_port = 0 # The name of the default kernel to start. # c.ZMQTerminalIPythonApp.kernel_name = 'python' # If true, IPython will populate the user namespace with numpy, pylab, etc. and # an ``import *`` is done from numpy and pylab, when using pylab mode. # # When False, pylab mode should not import any names into the user namespace. # c.ZMQTerminalIPythonApp.pylab_import_all = True # Connect to an already running kernel # c.ZMQTerminalIPythonApp.existing = '' # Set the kernel's IP address [default localhost]. If the IP address is # something other than localhost, then Consoles on other machines will be able # to connect to the Kernel, so be careful! # c.ZMQTerminalIPythonApp.ip = '' # ------------------------------------------------------------------------------ # ZMQTerminalInteractiveShell configuration # ------------------------------------------------------------------------------ # A subclass of TerminalInteractiveShell that uses the 0MQ kernel # ZMQTerminalInteractiveShell will inherit config from: # TerminalInteractiveShell, InteractiveShell # # c.ZMQTerminalInteractiveShell.history_length = 10000 # auto editing of files with syntax errors. # c.ZMQTerminalInteractiveShell.autoedit_syntax = False # If True, anything that would be passed to the pager will be displayed as # regular output instead. # c.ZMQTerminalInteractiveShell.display_page = False # # c.ZMQTerminalInteractiveShell.debug = False # 'all', 'last', 'last_expr' or 'none', specifying which nodes should be run # interactively (displaying output from expressions). # c.ZMQTerminalInteractiveShell.ast_node_interactivity = 'last_expr' # Start logging to the default log file in overwrite mode. Use `logappend` to # specify a log file to **append** logs to. 
# c.ZMQTerminalInteractiveShell.logstart = False # Set the size of the output cache. The default is 1000, you can change it # permanently in your config file. Setting it to 0 completely disables the # caching system, and the minimum value accepted is 20 (if you provide a value # less than 20, it is reset to 0 and a warning is issued). This limit is # defined because otherwise you'll spend more time re-flushing a too small cache # than working # c.ZMQTerminalInteractiveShell.cache_size = 1000 # The shell program to be used for paging. # c.ZMQTerminalInteractiveShell.pager = 'less' # The name of the logfile to use. # c.ZMQTerminalInteractiveShell.logfile = '' # Save multi-line entries as one entry in readline history # c.ZMQTerminalInteractiveShell.multiline_history = True # # c.ZMQTerminalInteractiveShell.readline_remove_delims = '-/~' # Enable magic commands to be called without the leading %. # c.ZMQTerminalInteractiveShell.automagic = True # Prefix to add to outputs coming from clients other than this one. # # Only relevant if include_other_output is True. # c.ZMQTerminalInteractiveShell.other_output_prefix = '[remote] ' # # c.ZMQTerminalInteractiveShell.readline_parse_and_bind = ['tab: complete', '"\\C-l": clear-screen', 'set show-all-if-ambiguous on', '"\\C-o": tab-insert', '"\\C-r": reverse-search-history', '"\\C-s": forward-search-history', '"\\C-p": history-search-backward', '"\\C-n": history-search-forward', '"\\e[A": history-search-backward', '"\\e[B": history-search-forward', '"\\C-k": kill-line', '"\\C-u": unix-line-discard'] # Use colors for displaying information about objects. Because this information # is passed through a pager (like 'less'), and some pagers get confused with # color codes, this capability can be turned off. # c.ZMQTerminalInteractiveShell.color_info = True # Callable object called via 'callable' image handler with one argument, `data`, # which is `msg["content"]["data"]` where `msg` is the message from iopub # channel. For example, you can find base64 encoded PNG data as # `data['image/png']`. # c.ZMQTerminalInteractiveShell.callable_image_handler = None # Command to invoke an image viewer program when you are using 'stream' image # handler. This option is a list of strings where the first element is the # command itself and the rest are the options for the command. Raw image data # is given as STDIN to the program. # c.ZMQTerminalInteractiveShell.stream_image_handler = [] # # c.ZMQTerminalInteractiveShell.separate_out2 = '' # Autoindent IPython code entered interactively. # c.ZMQTerminalInteractiveShell.autoindent = True # The part of the banner to be printed after the profile # c.ZMQTerminalInteractiveShell.banner2 = '' # Don't call post-execute functions that have failed in the past. # c.ZMQTerminalInteractiveShell.disable_failing_post_execute = False # Deprecated, use PromptManager.out_template # c.ZMQTerminalInteractiveShell.prompt_out = 'Out[\\#]: ' # # c.ZMQTerminalInteractiveShell.object_info_string_level = 0 # # c.ZMQTerminalInteractiveShell.separate_out = '' # Automatically call the pdb debugger after every exception. # c.ZMQTerminalInteractiveShell.pdb = False # Deprecated, use PromptManager.in_template # c.ZMQTerminalInteractiveShell.prompt_in1 = 'In [\\#]: ' # # c.ZMQTerminalInteractiveShell.separate_in = '\n' # # c.ZMQTerminalInteractiveShell.wildcards_case_sensitive = True # Enable auto setting the terminal title. # c.ZMQTerminalInteractiveShell.term_title = False # Enable deep (recursive) reloading by default.
IPython can use the deep_reload # module which reloads changes in modules recursively (it replaces the reload() # function, so you don't need to change anything to use it). deep_reload() # forces a full reload of modules whose code may have changed, which the default # reload() function does not. When deep_reload is off, IPython will use the # normal reload(), but deep_reload will still be available as dreload(). # c.ZMQTerminalInteractiveShell.deep_reload = False # Deprecated, use PromptManager.in2_template # c.ZMQTerminalInteractiveShell.prompt_in2 = ' .\\D.: ' # Whether to include output from clients other than this one sharing the same # kernel. # # Outputs are not displayed until enter is pressed. # c.ZMQTerminalInteractiveShell.include_other_output = False # Preferred object representation MIME type in order. First matched MIME type # will be used. # c.ZMQTerminalInteractiveShell.mime_preference = ['image/png', 'image/jpeg', 'image/svg+xml'] # # c.ZMQTerminalInteractiveShell.readline_use = True # Make IPython automatically call any callable object even if you didn't type # explicit parentheses. For example, 'str 43' becomes 'str(43)' automatically. # The value can be '0' to disable the feature, '1' for 'smart' autocall, where # it is not applied if there are no more arguments on the line, and '2' for # 'full' autocall, where all callable objects are automatically called (even if # no arguments are present). # c.ZMQTerminalInteractiveShell.autocall = 0 # The part of the banner to be printed before the profile # c.ZMQTerminalInteractiveShell.banner1 = 'Python 3.4.3 |Continuum Analytics, Inc.| (default, Mar 6 2015, 12:07:41) \nType "copyright", "credits" or "license" for more information.\n\nIPython 3.1.0 -- An enhanced Interactive Python.\nAnaconda is brought to you by Continuum Analytics.\nPlease check out: http://continuum.io/thanks and https://binstar.org\n? -> Introduction and overview of IPython\'s features.\n%quickref -> Quick reference.\nhelp -> Python\'s own help system.\nobject? -> Details about \'object\', use \'object??\' for extra details.\n' # Handler for image type output. This is useful, for example, when connecting # to a kernel in which the pylab inline backend is activated. There are four # handlers defined. 'PIL': Use the Python Imaging Library to pop up the image; 'stream': # Use an external program to show the image. Image will be fed into the STDIN # of the program. You will need to configure `stream_image_handler`; # 'tempfile': Use an external program to show the image. Image will be saved in # a temporary file and the program is called with the temporary file. You # will need to configure `tempfile_image_handler`; 'callable': You can set any # Python callable which is called with the image data. You will need to # configure `callable_image_handler`. # c.ZMQTerminalInteractiveShell.image_handler = None # Set the color scheme (NoColor, Linux, or LightBG). # c.ZMQTerminalInteractiveShell.colors = 'LightBG' # Set the editor used by IPython (default to $EDITOR/vi/notepad). # c.ZMQTerminalInteractiveShell.editor = 'mate -w' # Show rewritten input, e.g. for autocall. # c.ZMQTerminalInteractiveShell.show_rewritten_input = True # # c.ZMQTerminalInteractiveShell.xmode = 'Context' # # c.ZMQTerminalInteractiveShell.quiet = False # A list of ast.NodeTransformer subclass instances, which will be applied to # user input before code is run.
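# A minimal, illustrative sketch (the class and the Integer() constructor are
# hypothetical, in the spirit of the IPython docs, and not part of this profile) of
# what such a transformer can look like:
#
# import ast
#
# class IntegerWrapper(ast.NodeTransformer):
#     """Wrap integer literals in a call to Integer()."""
#     def visit_Num(self, node):
#         if isinstance(node.n, int):
#             return ast.Call(func=ast.Name(id='Integer', ctx=ast.Load()),
#                             args=[node], keywords=[])
#         return node
#
# An instance would then be registered through the option below, e.g.
# c.ZMQTerminalInteractiveShell.ast_transformers = [IntegerWrapper()]
# The commented default applies no transformations.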
# c.ZMQTerminalInteractiveShell.ast_transformers = [] # # c.ZMQTerminalInteractiveShell.ipython_dir = '' # Set to confirm when you try to exit IPython with an EOF (Control-D in Unix, # Control-Z/Enter in Windows). By typing 'exit' or 'quit', you can force a # direct exit without any confirmation. # c.ZMQTerminalInteractiveShell.confirm_exit = True # Deprecated, use PromptManager.justify # c.ZMQTerminalInteractiveShell.prompts_pad_left = True # Timeout for giving up on a kernel (in seconds). # # On first connect and restart, the console tests whether the kernel is running # and responsive by sending kernel_info_requests. This sets the timeout in # seconds for how long the kernel can take before being presumed dead. # c.ZMQTerminalInteractiveShell.kernel_timeout = 60 # Number of lines of your screen, used to control printing of very long strings. # Strings longer than this number of lines will be sent through a pager instead # of directly printed. The default value for this is 0, which means IPython # will auto-detect your screen size every time it needs to print certain # potentially long strings (this doesn't change the behavior of the 'print' # keyword, it's only triggered internally). If for some reason this isn't # working well (it needs curses support), specify it yourself. Otherwise don't # change the default. # c.ZMQTerminalInteractiveShell.screen_length = 0 # Start logging to the given file in append mode. Use `logfile` to specify a log # file to **overwrite** logs to. # c.ZMQTerminalInteractiveShell.logappend = '' # Command to invoke an image viewer program when you are using 'tempfile' image # handler. This option is a list of strings where the first element is the # command itself and the rest are the options for the command. You can use # {file} and {format} in the string to represent the location of the generated # image file and image format. # c.ZMQTerminalInteractiveShell.tempfile_image_handler = [] # ------------------------------------------------------------------------------ # KernelManager configuration # ------------------------------------------------------------------------------ # Manages a single kernel in a subprocess on this host. # # This version starts kernels with Popen. # KernelManager will inherit config from: ConnectionFileMixin # set the heartbeat port [default: random] # c.KernelManager.hb_port = 0 # set the stdin (ROUTER) port [default: random] # c.KernelManager.stdin_port = 0 # # c.KernelManager.transport = 'tcp' # JSON file in which to store connection info [default: kernel-.json] # # This file will contain the IP, ports, and authentication key needed to connect # clients to this kernel. By default, this file will be created in the security # dir of the current profile, but can be specified by absolute path. # c.KernelManager.connection_file = '' # set the control (ROUTER) port [default: random] # c.KernelManager.control_port = 0 # set the shell (ROUTER) port [default: random] # c.KernelManager.shell_port = 0 # Should we autorestart the kernel if it dies. # c.KernelManager.autorestart = False # DEPRECATED: Use kernel_name instead. # # The Popen Command to launch the kernel. Override this if you have a custom # kernel. If kernel_cmd is specified in a configuration file, IPython does not # pass any arguments to the kernel, because it cannot make any assumptions about # the arguments that the kernel understands. In particular, this means that the # kernel does not receive the option --debug if it is given on the IPython command # line.
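# For illustration only (the interpreter path is hypothetical, and the option itself is
# deprecated in favour of kernel_name), a custom launch command might look like:
#
# c.KernelManager.kernel_cmd = ['/opt/py3/bin/python', '-m', 'IPython.kernel',
#                               '-f', '{connection_file}']
#
# The commented default below leaves kernel selection to the kernelspec machinery.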
# c.KernelManager.kernel_cmd = [] # Set the kernel's IP address [default localhost]. If the IP address is # something other than localhost, then Consoles on other machines will be able # to connect to the Kernel, so be careful! # c.KernelManager.ip = '' # set the iopub (PUB) port [default: random] # c.KernelManager.iopub_port = 0 # ------------------------------------------------------------------------------ # ProfileDir configuration # ------------------------------------------------------------------------------ # An object to manage the profile directory and its resources. # # The profile directory is used by all IPython applications, to manage # configuration, logging and security. # # This object knows how to find, create and manage these directories. This # should be used by any code that wants to handle profiles. # Set the profile location directly. This overrides the logic used by the # `profile` option. # c.ProfileDir.location = '' # ------------------------------------------------------------------------------ # Session configuration # ------------------------------------------------------------------------------ # Object for handling serialization and sending of messages. # # The Session object handles building messages and sending them with ZMQ sockets # or ZMQStream objects. Objects can communicate with each other over the # network via Session objects, and only need to work with the dict-based IPython # message spec. The Session will handle serialization/deserialization, security, # and metadata. # # Sessions support configurable serialization via packer/unpacker traits, and # signing with HMAC digests via the key/keyfile traits. # # Parameters ---------- # # debug : bool # whether to trigger extra debugging statements # packer/unpacker : str : 'json', 'pickle' or import_string # importstrings for methods to serialize message parts. If just # 'json' or 'pickle', predefined JSON and pickle packers will be used. # Otherwise, the entire importstring must be used. # # The functions must accept at least valid JSON input, and output *bytes*. # # For example, to use msgpack: # packer = 'msgpack.packb', unpacker='msgpack.unpackb' # pack/unpack : callables # You can also set the pack/unpack callables for serialization directly. # session : bytes # the ID of this Session object. The default is to generate a new UUID. # username : unicode # username added to message headers. The default is to ask the OS. # key : bytes # The key used to initialize an HMAC signature. If unset, messages # will not be signed or checked. # keyfile : filepath # The file containing a key. If this is set, `key` will be initialized # to the contents of the file. # The digest scheme used to construct the message signatures. Must have the form # 'hmac-HASH'. # c.Session.signature_scheme = 'hmac-sha256' # The maximum number of digests to remember. # # The digest history will be culled when it exceeds this value. # c.Session.digest_history_size = 65536 # The name of the unpacker for unserializing messages. Only used with custom # functions for `packer`. # c.Session.unpacker = 'json' # The name of the packer for serializing messages. Should be one of 'json', # 'pickle', or an import name for a custom callable serializer. # c.Session.packer = 'json' # Username for the Session. Default is your system username. # c.Session.username = 'minrk' # Debug output in the Session # c.Session.debug = False # path to file containing execution key. 
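# A hedged illustration (the path is hypothetical): pointing keyfile at a file whose
# raw bytes become the HMAC signing key, instead of setting `key` inline:
#
# c.Session.keyfile = '/home/user/.ipython/profile_default/security/session.key'
#
# The commented default below leaves it unset.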
# c.Session.keyfile = '' # The maximum number of items for a container to be introspected for custom # serialization. Containers larger than this are pickled outright. # c.Session.item_threshold = 64 # Threshold (in bytes) beyond which an object's buffer should be extracted to # avoid pickling. # c.Session.buffer_threshold = 1024 # The UUID identifying this session. # c.Session.session = '' # Threshold (in bytes) beyond which a buffer should be sent without copying. # c.Session.copy_threshold = 65536 # execution key, for signing messages. # c.Session.key = b'' # Metadata dictionary, which serves as the default top-level metadata dict for # each message. # c.Session.metadata = {} jupyter_core-5.7.2/tests/dotipython/profile_default/ipython_kernel_config.py000066400000000000000000000360171457404620400277140ustar00rootroot00000000000000# Configuration file for ipython-kernel. from __future__ import annotations c = get_config() # ------------------------------------------------------------------------------ # IPKernelApp configuration # ------------------------------------------------------------------------------ # IPython: an enhanced interactive Python shell. # IPKernelApp will inherit config from: BaseIPythonApplication, Application, # InteractiveShellApp, ConnectionFileMixin # Should variables loaded at startup (by startup files, exec_lines, etc.) be # hidden from tools like %who? # c.IPKernelApp.hide_initial_ns = True # The importstring for the DisplayHook factory # c.IPKernelApp.displayhook_class = 'IPython.kernel.zmq.displayhook.ZMQDisplayHook' # A list of dotted module names of IPython extensions to load. # c.IPKernelApp.extensions = [] # Execute the given command string. # c.IPKernelApp.code_to_run = '' # redirect stderr to the null device # c.IPKernelApp.no_stderr = False # The date format used by logging formatters for %(asctime)s # c.IPKernelApp.log_datefmt = '%Y-%m-%d %H:%M:%S' # Whether to create profile dir if it doesn't exist # c.IPKernelApp.auto_create = False # Reraise exceptions encountered loading IPython extensions? # c.IPKernelApp.reraise_ipython_extension_failures = False # Set the log level by value or name. # c.IPKernelApp.log_level = 30 # Run the file referenced by the PYTHONSTARTUP environment variable at IPython # startup. # c.IPKernelApp.exec_PYTHONSTARTUP = True # Pre-load matplotlib and numpy for interactive use, selecting a particular # matplotlib backend and loop integration. # c.IPKernelApp.pylab = None # Run the module as a script. # c.IPKernelApp.module_to_run = '' # The importstring for the OutStream factory # c.IPKernelApp.outstream_class = 'IPython.kernel.zmq.iostream.OutStream' # dotted module name of an IPython extension to load. # c.IPKernelApp.extra_extension = '' # Create a massive crash report when IPython encounters what may be an internal # error. The default is to append a short message to the usual traceback # c.IPKernelApp.verbose_crash = False # Whether to overwrite existing config files when copying # c.IPKernelApp.overwrite = False # The IPython profile to use. # c.IPKernelApp.profile = 'default' # List of files to run at IPython startup. # c.IPKernelApp.exec_files = [] # The Logging format template # c.IPKernelApp.log_format = '[%(name)s]%(highlevel)s %(message)s' # Whether to install the default config files into the profile dir. If a new # profile is being created, and IPython contains config files for that profile, # then they will be staged into the new directory. Otherwise, default config # files will be automatically generated. 
# c.IPKernelApp.copy_config_files = False # set the stdin (ROUTER) port [default: random] # c.IPKernelApp.stdin_port = 0 # Path to an extra config file to load. # # If specified, load this config file in addition to any other IPython config. # c.IPKernelApp.extra_config_file = '' # lines of code to run at IPython startup. # c.IPKernelApp.exec_lines = [] # set the control (ROUTER) port [default: random] # c.IPKernelApp.control_port = 0 # set the heartbeat port [default: random] # c.IPKernelApp.hb_port = 0 # Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'osx', # 'pyglet', 'qt', 'qt5', 'tk', 'wx'). # c.IPKernelApp.gui = None # A file to be run # c.IPKernelApp.file_to_run = '' # The name of the IPython directory. This directory is used for logging # configuration (through profiles), history storage, etc. The default is usually # $HOME/.ipython. This option can also be specified through the environment # variable IPYTHONDIR. # c.IPKernelApp.ipython_dir = '' # kill this process if its parent dies. On Windows, the argument specifies the # HANDLE of the parent process, otherwise it is simply boolean. # c.IPKernelApp.parent_handle = 0 # Configure matplotlib for interactive use with the default matplotlib backend. # c.IPKernelApp.matplotlib = None # set the iopub (PUB) port [default: random] # c.IPKernelApp.iopub_port = 0 # redirect stdout to the null device # c.IPKernelApp.no_stdout = False # # c.IPKernelApp.transport = 'tcp' # JSON file in which to store connection info [default: kernel-.json] # # This file will contain the IP, ports, and authentication key needed to connect # clients to this kernel. By default, this file will be created in the security # dir of the current profile, but can be specified by absolute path. # c.IPKernelApp.connection_file = '' # The Kernel subclass to be used. # # This should allow easy re-use of the IPKernelApp entry point to configure and # launch kernels other than IPython's own. # c.IPKernelApp.kernel_class = # ONLY USED ON WINDOWS Interrupt this process when the parent is signaled. # c.IPKernelApp.interrupt = 0 # set the shell (ROUTER) port [default: random] # c.IPKernelApp.shell_port = 0 # If true, IPython will populate the user namespace with numpy, pylab, etc. and # an ``import *`` is done from numpy and pylab, when using pylab mode. # # When False, pylab mode should not import any names into the user namespace. # c.IPKernelApp.pylab_import_all = True # Set the kernel's IP address [default localhost]. If the IP address is # something other than localhost, then Consoles on other machines will be able # to connect to the Kernel, so be careful! # c.IPKernelApp.ip = '' # ------------------------------------------------------------------------------ # IPythonKernel configuration # ------------------------------------------------------------------------------ # IPythonKernel will inherit config from: Kernel # # c.IPythonKernel._execute_sleep = 0.0005 # Whether to use appnope for compatibility with OS X App Nap. # # Only affects OS X >= 10.9. # c.IPythonKernel._darwin_app_nap = True # # c.IPythonKernel._poll_interval = 0.05 # ------------------------------------------------------------------------------ # ZMQInteractiveShell configuration # ------------------------------------------------------------------------------ # A subclass of InteractiveShell for ZMQ. 
# ZMQInteractiveShell will inherit config from: InteractiveShell # # c.ZMQInteractiveShell.object_info_string_level = 0 # # c.ZMQInteractiveShell.separate_out = '' # Automatically call the pdb debugger after every exception. # c.ZMQInteractiveShell.pdb = False # # c.ZMQInteractiveShell.ipython_dir = '' # # c.ZMQInteractiveShell.history_length = 10000 # # c.ZMQInteractiveShell.readline_remove_delims = '-/~' # If True, anything that would be passed to the pager will be displayed as # regular output instead. # c.ZMQInteractiveShell.display_page = False # Deprecated, use PromptManager.in2_template # c.ZMQInteractiveShell.prompt_in2 = ' .\\D.: ' # # c.ZMQInteractiveShell.separate_in = '\n' # Start logging to the default log file in overwrite mode. Use `logappend` to # specify a log file to **append** logs to. # c.ZMQInteractiveShell.logstart = False # Set the size of the output cache. The default is 1000, you can change it # permanently in your config file. Setting it to 0 completely disables the # caching system, and the minimum value accepted is 20 (if you provide a value # less than 20, it is reset to 0 and a warning is issued). This limit is # defined because otherwise you'll spend more time re-flushing a too small cache # than working # c.ZMQInteractiveShell.cache_size = 1000 # # c.ZMQInteractiveShell.wildcards_case_sensitive = True # The name of the logfile to use. # c.ZMQInteractiveShell.logfile = '' # 'all', 'last', 'last_expr' or 'none', specifying which nodes should be run # interactively (displaying output from expressions). # c.ZMQInteractiveShell.ast_node_interactivity = 'last_expr' # # c.ZMQInteractiveShell.debug = False # # c.ZMQInteractiveShell.quiet = False # Save multi-line entries as one entry in readline history # c.ZMQInteractiveShell.multiline_history = True # Deprecated, use PromptManager.in_template # c.ZMQInteractiveShell.prompt_in1 = 'In [\\#]: ' # Enable magic commands to be called without the leading %. # c.ZMQInteractiveShell.automagic = True # The part of the banner to be printed before the profile # c.ZMQInteractiveShell.banner1 = 'Python 3.4.3 |Continuum Analytics, Inc.| (default, Mar 6 2015, 12:07:41) \nType "copyright", "credits" or "license" for more information.\n\nIPython 3.1.0 -- An enhanced Interactive Python.\nAnaconda is brought to you by Continuum Analytics.\nPlease check out: http://continuum.io/thanks and https://binstar.org\n? -> Introduction and overview of IPython\'s features.\n%quickref -> Quick reference.\nhelp -> Python\'s own help system.\nobject? -> Details about \'object\', use \'object??\' for extra details.\n' # Make IPython automatically call any callable object even if you didn't type # explicit parentheses. For example, 'str 43' becomes 'str(43)' automatically. # The value can be '0' to disable the feature, '1' for 'smart' autocall, where # it is not applied if there are no more arguments on the line, and '2' for # 'full' autocall, where all callable objects are automatically called (even if # no arguments are present). # c.ZMQInteractiveShell.autocall = 0 # # c.ZMQInteractiveShell.readline_parse_and_bind = ['tab: complete', '"\\C-l": clear-screen', 'set show-all-if-ambiguous on', '"\\C-o": tab-insert', '"\\C-r": reverse-search-history', '"\\C-s": forward-search-history', '"\\C-p": history-search-backward', '"\\C-n": history-search-forward', '"\\e[A": history-search-backward', '"\\e[B": history-search-forward', '"\\C-k": kill-line', '"\\C-u": unix-line-discard'] # Set the color scheme (NoColor, Linux, or LightBG). 
# c.ZMQInteractiveShell.colors = 'LightBG' # Use colors for displaying information about objects. Because this information # is passed through a pager (like 'less'), and some pagers get confused with # color codes, this capability can be turned off. # c.ZMQInteractiveShell.color_info = True # Show rewritten input, e.g. for autocall. # c.ZMQInteractiveShell.show_rewritten_input = True # # c.ZMQInteractiveShell.xmode = 'Context' # # c.ZMQInteractiveShell.separate_out2 = '' # The part of the banner to be printed after the profile # c.ZMQInteractiveShell.banner2 = '' # Start logging to the given file in append mode. Use `logfile` to specify a log # file to **overwrite** logs to. # c.ZMQInteractiveShell.logappend = '' # Don't call post-execute functions that have failed in the past. # c.ZMQInteractiveShell.disable_failing_post_execute = False # Deprecated, use PromptManager.out_template # c.ZMQInteractiveShell.prompt_out = 'Out[\\#]: ' # Enable deep (recursive) reloading by default. IPython can use the deep_reload # module which reloads changes in modules recursively (it replaces the reload() # function, so you don't need to change anything to use it). deep_reload() # forces a full reload of modules whose code may have changed, which the default # reload() function does not. When deep_reload is off, IPython will use the # normal reload(), but deep_reload will still be available as dreload(). # c.ZMQInteractiveShell.deep_reload = False # Deprecated, use PromptManager.justify # c.ZMQInteractiveShell.prompts_pad_left = True # A list of ast.NodeTransformer subclass instances, which will be applied to # user input before code is run. # c.ZMQInteractiveShell.ast_transformers = [] # ------------------------------------------------------------------------------ # ProfileDir configuration # ------------------------------------------------------------------------------ # An object to manage the profile directory and its resources. # # The profile directory is used by all IPython applications, to manage # configuration, logging and security. # # This object knows how to find, create and manage these directories. This # should be used by any code that wants to handle profiles. # Set the profile location directly. This overrides the logic used by the # `profile` option. # c.ProfileDir.location = '' # ------------------------------------------------------------------------------ # Session configuration # ------------------------------------------------------------------------------ # Object for handling serialization and sending of messages. # # The Session object handles building messages and sending them with ZMQ sockets # or ZMQStream objects. Objects can communicate with each other over the # network via Session objects, and only need to work with the dict-based IPython # message spec. The Session will handle serialization/deserialization, security, # and metadata. # # Sessions support configurable serialization via packer/unpacker traits, and # signing with HMAC digests via the key/keyfile traits. # # Parameters ---------- # # debug : bool # whether to trigger extra debugging statements # packer/unpacker : str : 'json', 'pickle' or import_string # importstrings for methods to serialize message parts. If just # 'json' or 'pickle', predefined JSON and pickle packers will be used. # Otherwise, the entire importstring must be used. # # The functions must accept at least valid JSON input, and output *bytes*. 
# # For example, to use msgpack: # packer = 'msgpack.packb', unpacker='msgpack.unpackb' # pack/unpack : callables # You can also set the pack/unpack callables for serialization directly. # session : bytes # the ID of this Session object. The default is to generate a new UUID. # username : unicode # username added to message headers. The default is to ask the OS. # key : bytes # The key used to initialize an HMAC signature. If unset, messages # will not be signed or checked. # keyfile : filepath # The file containing a key. If this is set, `key` will be initialized # to the contents of the file. # The digest scheme used to construct the message signatures. Must have the form # 'hmac-HASH'. # c.Session.signature_scheme = 'hmac-sha256' # The maximum number of digests to remember. # # The digest history will be culled when it exceeds this value. # c.Session.digest_history_size = 65536 # The name of the unpacker for unserializing messages. Only used with custom # functions for `packer`. # c.Session.unpacker = 'json' # The name of the packer for serializing messages. Should be one of 'json', # 'pickle', or an import name for a custom callable serializer. # c.Session.packer = 'json' # Username for the Session. Default is your system username. # c.Session.username = 'minrk' # Debug output in the Session # c.Session.debug = False # path to file containing execution key. # c.Session.keyfile = '' # The maximum number of items for a container to be introspected for custom # serialization. Containers larger than this are pickled outright. # c.Session.item_threshold = 64 # Threshold (in bytes) beyond which an object's buffer should be extracted to # avoid pickling. # c.Session.buffer_threshold = 1024 # The UUID identifying this session. # c.Session.session = '' # Threshold (in bytes) beyond which a buffer should be sent without copying. # c.Session.copy_threshold = 65536 # execution key, for signing messages. # c.Session.key = b'' # Metadata dictionary, which serves as the default top-level metadata dict for # each message. 
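# A hedged sketch of the msgpack option mentioned above (it assumes the third-party
# msgpack package is installed; that package is not part of this profile):
#
# c.Session.packer = 'msgpack.packb'
# c.Session.unpacker = 'msgpack.unpackb'
#
# Custom packers must accept valid JSON input and return bytes. The metadata dict
# below is independent of the serialization choice and is normally left empty.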
# c.Session.metadata = {} jupyter_core-5.7.2/tests/dotipython/profile_default/ipython_nbconvert_config.py000066400000000000000000000001101457404620400304150ustar00rootroot00000000000000from __future__ import annotations c.NbConvertApp.post_processors = [] jupyter_core-5.7.2/tests/dotipython/profile_default/ipython_notebook_config.py000066400000000000000000000001071457404620400302430ustar00rootroot00000000000000from __future__ import annotations c.NotebookApp.open_browser = False jupyter_core-5.7.2/tests/dotipython/profile_default/static/000077500000000000000000000000001457404620400242435ustar00rootroot00000000000000jupyter_core-5.7.2/tests/dotipython/profile_default/static/custom/000077500000000000000000000000001457404620400255555ustar00rootroot00000000000000jupyter_core-5.7.2/tests/dotipython/profile_default/static/custom/custom.css000066400000000000000000000002211457404620400275740ustar00rootroot00000000000000/* Placeholder for custom user CSS mainly to be overridden in profile/static/custom/custom.css This will always be an empty file in IPython */ jupyter_core-5.7.2/tests/dotipython/profile_default/static/custom/custom.js000066400000000000000000000053631457404620400274340ustar00rootroot00000000000000// leave at least 2 line with only a star on it below, or doc generation fails /** * * * Placeholder for custom user javascript * mainly to be overridden in profile/static/custom/custom.js * This will always be an empty file in IPython * * User could add any javascript in the `profile/static/custom/custom.js` file. * It will be executed by the ipython notebook at load time. * * Same thing with `profile/static/custom/custom.css` to inject custom css into the notebook. * * * The object available at load time depend on the version of IPython in use. * there is no guaranties of API stability. * * The example below explain the principle, and might not be valid. * * Instances are created after the loading of this file and might need to be accessed using events: * define([ * 'base/js/namespace', * 'base/js/events' * ], function(IPython, events) { * events.on("app_initialized.NotebookApp", function () { * IPython.keyboard_manager.... * }); * }); * * __Example 1:__ * * Create a custom button in toolbar that execute `%qtconsole` in kernel * and hence open a qtconsole attached to the same kernel as the current notebook * * define([ * 'base/js/namespace', * 'base/js/events' * ], function(IPython, events) { * events.on('app_initialized.NotebookApp', function(){ * IPython.toolbar.add_buttons_group([ * { * 'label' : 'run qtconsole', * 'icon' : 'icon-terminal', // select your icon from http://fortawesome.github.io/Font-Awesome/icons * 'callback': function () { * IPython.notebook.kernel.execute('%qtconsole') * } * } * // add more button here if needed. * ]); * }); * }); * * __Example 2:__ * * At the completion of the dashboard loading, load an unofficial javascript extension * that is installed in profile/static/custom/ * * define([ * 'base/js/events' * ], function(events) { * events.on('app_initialized.DashboardApp', function(){ * require(['custom/unofficial_extension.js']) * }); * }); * * __Example 3:__ * * Use `jQuery.getScript(url [, success(script, textStatus, jqXHR)] );` * to load custom script into the notebook. * * // to load the metadata ui extension example. 
* $.getScript('/static/notebook/js/celltoolbarpresets/example.js'); * // or * // to load the metadata ui extension to control slideshow mode / reveal js for nbconvert * $.getScript('/static/notebook/js/celltoolbarpresets/slideshow.js'); * * * @module IPython * @namespace IPython * @class customjs * @static */ jupyter_core-5.7.2/tests/dotipython_empty/000077500000000000000000000000001457404620400210265ustar00rootroot00000000000000jupyter_core-5.7.2/tests/dotipython_empty/profile_default/000077500000000000000000000000001457404620400241725ustar00rootroot00000000000000jupyter_core-5.7.2/tests/dotipython_empty/profile_default/ipython_config.py000066400000000000000000000502061457404620400275660ustar00rootroot00000000000000# Configuration file for ipython. from __future__ import annotations c = get_config() # ------------------------------------------------------------------------------ # InteractiveShellApp configuration # ------------------------------------------------------------------------------ # A Mixin for applications that start InteractiveShell instances. # # Provides configurables for loading extensions and executing files as part of # configuring a Shell environment. # # The following methods should be called by the :meth:`initialize` method of the # subclass: # # - :meth:`init_path` # - :meth:`init_shell` (to be implemented by the subclass) # - :meth:`init_gui_pylab` # - :meth:`init_extensions` # - :meth:`init_code` # lines of code to run at IPython startup. # c.InteractiveShellApp.exec_lines = [] # Should variables loaded at startup (by startup files, exec_lines, etc.) be # hidden from tools like %who? # c.InteractiveShellApp.hide_initial_ns = True # A list of dotted module names of IPython extensions to load. # c.InteractiveShellApp.extensions = [] # Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'osx', # 'pyglet', 'qt', 'qt5', 'tk', 'wx'). # c.InteractiveShellApp.gui = None # A file to be run # c.InteractiveShellApp.file_to_run = '' # Configure matplotlib for interactive use with the default matplotlib backend. # c.InteractiveShellApp.matplotlib = None # Reraise exceptions encountered loading IPython extensions? # c.InteractiveShellApp.reraise_ipython_extension_failures = False # Run the file referenced by the PYTHONSTARTUP environment variable at IPython # startup. # c.InteractiveShellApp.exec_PYTHONSTARTUP = True # Pre-load matplotlib and numpy for interactive use, selecting a particular # matplotlib backend and loop integration. # c.InteractiveShellApp.pylab = None # Run the module as a script. # c.InteractiveShellApp.module_to_run = '' # dotted module name of an IPython extension to load. # c.InteractiveShellApp.extra_extension = '' # List of files to run at IPython startup. # c.InteractiveShellApp.exec_files = [] # If true, IPython will populate the user namespace with numpy, pylab, etc. and # an ``import *`` is done from numpy and pylab, when using pylab mode. # # When False, pylab mode should not import any names into the user namespace. # c.InteractiveShellApp.pylab_import_all = True # Execute the given command string. # c.InteractiveShellApp.code_to_run = '' # ------------------------------------------------------------------------------ # TerminalIPythonApp configuration # ------------------------------------------------------------------------------ # TerminalIPythonApp will inherit config from: BaseIPythonApplication, # Application, InteractiveShellApp # Should variables loaded at startup (by startup files, exec_lines, etc.) 
be # hidden from tools like %who? # c.TerminalIPythonApp.hide_initial_ns = True # A list of dotted module names of IPython extensions to load. # c.TerminalIPythonApp.extensions = [] # Execute the given command string. # c.TerminalIPythonApp.code_to_run = '' # The date format used by logging formatters for %(asctime)s # c.TerminalIPythonApp.log_datefmt = '%Y-%m-%d %H:%M:%S' # Reraise exceptions encountered loading IPython extensions? # c.TerminalIPythonApp.reraise_ipython_extension_failures = False # Set the log level by value or name. # c.TerminalIPythonApp.log_level = 30 # Run the file referenced by the PYTHONSTARTUP environment variable at IPython # startup. # c.TerminalIPythonApp.exec_PYTHONSTARTUP = True # Pre-load matplotlib and numpy for interactive use, selecting a particular # matplotlib backend and loop integration. # c.TerminalIPythonApp.pylab = None # Run the module as a script. # c.TerminalIPythonApp.module_to_run = '' # Whether to display a banner upon starting IPython. # c.TerminalIPythonApp.display_banner = True # dotted module name of an IPython extension to load. # c.TerminalIPythonApp.extra_extension = '' # Create a massive crash report when IPython encounters what may be an internal # error. The default is to append a short message to the usual traceback # c.TerminalIPythonApp.verbose_crash = False # Whether to overwrite existing config files when copying # c.TerminalIPythonApp.overwrite = False # The IPython profile to use. # c.TerminalIPythonApp.profile = 'default' # If a command or file is given via the command-line, e.g. 'ipython foo.py', # start an interactive shell after executing the file or command. # c.TerminalIPythonApp.force_interact = False # List of files to run at IPython startup. # c.TerminalIPythonApp.exec_files = [] # Start IPython quickly by skipping the loading of config files. # c.TerminalIPythonApp.quick = False # The Logging format template # c.TerminalIPythonApp.log_format = '[%(name)s]%(highlevel)s %(message)s' # Whether to install the default config files into the profile dir. If a new # profile is being created, and IPython contains config files for that profile, # then they will be staged into the new directory. Otherwise, default config # files will be automatically generated. # c.TerminalIPythonApp.copy_config_files = False # Path to an extra config file to load. # # If specified, load this config file in addition to any other IPython config. # c.TerminalIPythonApp.extra_config_file = '' # lines of code to run at IPython startup. # c.TerminalIPythonApp.exec_lines = [] # Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'osx', # 'pyglet', 'qt', 'qt5', 'tk', 'wx'). # c.TerminalIPythonApp.gui = None # A file to be run # c.TerminalIPythonApp.file_to_run = '' # Configure matplotlib for interactive use with the default matplotlib backend. # c.TerminalIPythonApp.matplotlib = None # Suppress warning messages about legacy config files # c.TerminalIPythonApp.ignore_old_config = False # The name of the IPython directory. This directory is used for logging # configuration (through profiles), history storage, etc. The default is usually # $HOME/.ipython. This option can also be specified through the environment # variable IPYTHONDIR. # c.TerminalIPythonApp.ipython_dir = '' # If true, IPython will populate the user namespace with numpy, pylab, etc. and # an ``import *`` is done from numpy and pylab, when using pylab mode. # # When False, pylab mode should not import any names into the user namespace. 
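# Illustrative sketch (not part of the generated defaults): explicit startup
# imports via exec_lines are generally preferred over pylab mode. Assuming
# numpy and matplotlib are installed, a minimal setup could look like:
#
#     c.InteractiveShellApp.exec_lines = ['import numpy as np',
#                                         'import matplotlib.pyplot as plt']
#     c.InteractiveShellApp.pylab_import_all = False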
# c.TerminalIPythonApp.pylab_import_all = True # ------------------------------------------------------------------------------ # TerminalInteractiveShell configuration # ------------------------------------------------------------------------------ # TerminalInteractiveShell will inherit config from: InteractiveShell # # c.TerminalInteractiveShell.object_info_string_level = 0 # # c.TerminalInteractiveShell.separate_out = '' # Automatically call the pdb debugger after every exception. # c.TerminalInteractiveShell.pdb = False # # c.TerminalInteractiveShell.ipython_dir = '' # # c.TerminalInteractiveShell.history_length = 10000 # # c.TerminalInteractiveShell.readline_remove_delims = '-/~' # auto editing of files with syntax errors. # c.TerminalInteractiveShell.autoedit_syntax = False # If True, anything that would be passed to the pager will be displayed as # regular output instead. # c.TerminalInteractiveShell.display_page = False # # c.TerminalInteractiveShell.debug = False # # c.TerminalInteractiveShell.separate_in = '\n' # Start logging to the default log file in overwrite mode. Use `logappend` to # specify a log file to **append** logs to. # c.TerminalInteractiveShell.logstart = False # Set the size of the output cache. The default is 1000, you can change it # permanently in your config file. Setting it to 0 completely disables the # caching system, and the minimum value accepted is 20 (if you provide a value # less than 20, it is reset to 0 and a warning is issued). This limit is # defined because otherwise you'll spend more time re-flushing a too small cache # than working # c.TerminalInteractiveShell.cache_size = 1000 # Set to confirm when you try to exit IPython with an EOF (Control-D in Unix, # Control-Z/Enter in Windows). By typing 'exit' or 'quit', you can force a # direct exit without any confirmation. # c.TerminalInteractiveShell.confirm_exit = True # The shell program to be used for paging. # c.TerminalInteractiveShell.pager = 'less' # # c.TerminalInteractiveShell.wildcards_case_sensitive = True # Deprecated, use PromptManager.justify # c.TerminalInteractiveShell.prompts_pad_left = True # The name of the logfile to use. # c.TerminalInteractiveShell.logfile = '' # 'all', 'last', 'last_expr' or 'none', specifying which nodes should be run # interactively (displaying output from expressions). # c.TerminalInteractiveShell.ast_node_interactivity = 'last_expr' # # c.TerminalInteractiveShell.quiet = False # Save multi-line entries as one entry in readline history # c.TerminalInteractiveShell.multiline_history = True # Deprecated, use PromptManager.in_template # c.TerminalInteractiveShell.prompt_in1 = 'In [\\#]: ' # # c.TerminalInteractiveShell.readline_use = True # Enable magic commands to be called without the leading %. # c.TerminalInteractiveShell.automagic = True # The part of the banner to be printed before the profile # c.TerminalInteractiveShell.banner1 = 'Python 3.4.3 |Continuum Analytics, Inc.| (default, Mar 6 2015, 12:07:41) \nType "copyright", "credits" or "license" for more information.\n\nIPython 3.1.0 -- An enhanced Interactive Python.\nAnaconda is brought to you by Continuum Analytics.\nPlease check out: http://continuum.io/thanks and https://binstar.org\n? -> Introduction and overview of IPython\'s features.\n%quickref -> Quick reference.\nhelp -> Python\'s own help system.\nobject? -> Details about \'object\', use \'object??\' for extra details.\n' # Make IPython automatically call any callable object even if you didn't type # explicit parentheses. 
For example, 'str 43' becomes 'str(43)' automatically. # The value can be '0' to disable the feature, '1' for 'smart' autocall, where # it is not applied if there are no more arguments on the line, and '2' for # 'full' autocall, where all callable objects are automatically called (even if # no arguments are present). # c.TerminalInteractiveShell.autocall = 0 # Autoindent IPython code entered interactively. # c.TerminalInteractiveShell.autoindent = True # Set the color scheme (NoColor, Linux, or LightBG). # c.TerminalInteractiveShell.colors = 'LightBG' # Set the editor used by IPython (default to $EDITOR/vi/notepad). # c.TerminalInteractiveShell.editor = 'mate -w' # Use colors for displaying information about objects. Because this information # is passed through a pager (like 'less'), and some pagers get confused with # color codes, this capability can be turned off. # c.TerminalInteractiveShell.color_info = True # # c.TerminalInteractiveShell.readline_parse_and_bind = ['tab: complete', '"\\C-l": clear-screen', 'set show-all-if-ambiguous on', '"\\C-o": tab-insert', '"\\C-r": reverse-search-history', '"\\C-s": forward-search-history', '"\\C-p": history-search-backward', '"\\C-n": history-search-forward', '"\\e[A": history-search-backward', '"\\e[B": history-search-forward', '"\\C-k": kill-line', '"\\C-u": unix-line-discard'] # Deprecated, use PromptManager.in2_template # c.TerminalInteractiveShell.prompt_in2 = ' .\\D.: ' # # c.TerminalInteractiveShell.separate_out2 = '' # The part of the banner to be printed after the profile # c.TerminalInteractiveShell.banner2 = '' # Start logging to the given file in append mode. Use `logfile` to specify a log # file to **overwrite** logs to. # c.TerminalInteractiveShell.logappend = '' # Don't call post-execute functions that have failed in the past. # c.TerminalInteractiveShell.disable_failing_post_execute = False # Deprecated, use PromptManager.out_template # c.TerminalInteractiveShell.prompt_out = 'Out[\\#]: ' # Enable deep (recursive) reloading by default. IPython can use the deep_reload # module which reloads changes in modules recursively (it replaces the reload() # function, so you don't need to change anything to use it). deep_reload() # forces a full reload of modules whose code may have changed, which the default # reload() function does not. When deep_reload is off, IPython will use the # normal reload(), but deep_reload will still be available as dreload(). # c.TerminalInteractiveShell.deep_reload = False # # c.TerminalInteractiveShell.xmode = 'Context' # Show rewritten input, e.g. for autocall. # c.TerminalInteractiveShell.show_rewritten_input = True # Number of lines of your screen, used to control printing of very long strings. # Strings longer than this number of lines will be sent through a pager instead # of directly printed. The default value for this is 0, which means IPython # will auto-detect your screen size every time it needs to print certain # potentially long strings (this doesn't change the behavior of the 'print' # keyword, it's only triggered internally). If for some reason this isn't # working well (it needs curses support), specify it yourself. Otherwise don't # change the default. # c.TerminalInteractiveShell.screen_length = 0 # A list of ast.NodeTransformer subclass instances, which will be applied to # user input before code is run. # c.TerminalInteractiveShell.ast_transformers = [] # Enable auto setting the terminal title. 
# c.TerminalInteractiveShell.term_title = False # ------------------------------------------------------------------------------ # PromptManager configuration # ------------------------------------------------------------------------------ # This is the primary interface for producing IPython's prompts. # # c.PromptManager.color_scheme = 'Linux' # Continuation prompt. # c.PromptManager.in2_template = ' .\\D.: ' # Input prompt. '\#' will be transformed to the prompt number # c.PromptManager.in_template = 'In [\\#]: ' # Output prompt. '\#' will be transformed to the prompt number # c.PromptManager.out_template = 'Out[\\#]: ' # If True (default), each prompt will be right-aligned with the preceding one. # c.PromptManager.justify = True # ------------------------------------------------------------------------------ # HistoryManager configuration # ------------------------------------------------------------------------------ # A class to organize all history-related functionality in one place. # HistoryManager will inherit config from: HistoryAccessor # Options for configuring the SQLite connection # # These options are passed as keyword args to sqlite3.connect when establishing # database connections. # c.HistoryManager.connection_options = {} # Should the history database include output? (default: no) # c.HistoryManager.db_log_output = False # enable the SQLite history # # set enabled=False to disable the SQLite history, in which case there will be # no stored history, no SQLite connection, and no background saving thread. # This may be necessary in some threaded environments where IPython is embedded. # c.HistoryManager.enabled = True # Path to file to use for SQLite history database. # # By default, IPython will put the history database in the IPython profile # directory. If you would rather share one history among profiles, you can set # this value in each, so that they are consistent. # # Due to an issue with fcntl, SQLite is known to misbehave on some NFS mounts. # If you see IPython hanging, try setting this to something on a local disk, # e.g:: # # ipython --HistoryManager.hist_file=/tmp/ipython_hist.sqlite # c.HistoryManager.hist_file = '' # Write to database every x commands (higher values save disk access & power). # Values of 1 or less effectively disable caching. # c.HistoryManager.db_cache_size = 0 # ------------------------------------------------------------------------------ # ProfileDir configuration # ------------------------------------------------------------------------------ # An object to manage the profile directory and its resources. # # The profile directory is used by all IPython applications, to manage # configuration, logging and security. # # This object knows how to find, create and manage these directories. This # should be used by any code that wants to handle profiles. # Set the profile location directly. This overrides the logic used by the # `profile` option. # c.ProfileDir.location = '' # ------------------------------------------------------------------------------ # PlainTextFormatter configuration # ------------------------------------------------------------------------------ # The default pretty-printer. # # This uses :mod:`IPython.lib.pretty` to compute the format data of the object. # If the object cannot be pretty printed, :func:`repr` is used. See the # documentation of :mod:`IPython.lib.pretty` for details on how to write pretty # printers. 
Here is a simple example:: # # def dtype_pprinter(obj, p, cycle): # if cycle: # return p.text('dtype(...)') # if hasattr(obj, 'fields'): # if obj.fields is None: # p.text(repr(obj)) # else: # p.begin_group(7, 'dtype([') # for i, field in enumerate(obj.descr): # if i > 0: # p.text(',') # p.breakable() # p.pretty(field) # p.end_group(7, '])') # PlainTextFormatter will inherit config from: BaseFormatter # # c.PlainTextFormatter.newline = '\n' # # c.PlainTextFormatter.max_width = 79 # # c.PlainTextFormatter.verbose = False # # c.PlainTextFormatter.pprint = True # # c.PlainTextFormatter.singleton_printers = {} # # c.PlainTextFormatter.type_printers = {} # Truncate large collections (lists, dicts, tuples, sets) to this size. # # Set to 0 to disable truncation. # c.PlainTextFormatter.max_seq_length = 1000 # # c.PlainTextFormatter.deferred_printers = {} # # c.PlainTextFormatter.float_precision = '' # ------------------------------------------------------------------------------ # IPCompleter configuration # ------------------------------------------------------------------------------ # Extension of the completer class with IPython-specific features # IPCompleter will inherit config from: Completer # Whether to merge completion results into a single list # # If False, only the completion results from the first non-empty completer will # be returned. # c.IPCompleter.merge_completions = True # Activate greedy completion # # This will enable completion on elements of lists, results of function calls, # etc., but can be unsafe because the code is actually evaluated on TAB. # c.IPCompleter.greedy = False # Instruct the completer to use __all__ for the completion # # Specifically, when completing on ``object.``. # # When True: only those names in obj.__all__ will be included. # # When False [default]: the __all__ attribute is ignored # c.IPCompleter.limit_to__all__ = False # Instruct the completer to omit private method names # # Specifically, when completing on ``object.``. # # When 2 [default]: all names that start with '_' will be excluded. # # When 1: all 'magic' names (``__foo__``) will be excluded. # # When 0: nothing will be excluded. # c.IPCompleter.omit__names = 2 # ------------------------------------------------------------------------------ # ScriptMagics configuration # ------------------------------------------------------------------------------ # Magics for talking to scripts # # This defines a base `%%script` cell magic for running a cell with a program in # a subprocess, and registers a few top-level magics that call %%script with # common interpreters. # Extra script cell magics to define # # This generates simple wrappers of `%%script foo` as `%%foo`. # # If you want to add script magics that aren't on your path, specify them in # script_paths # c.ScriptMagics.script_magics = [] # Dict mapping short 'ruby' names to full paths, such as '/opt/secret/bin/ruby' # # Only necessary for items in script_magics where the default path will not find # the right interpreter. # c.ScriptMagics.script_paths = {} # ------------------------------------------------------------------------------ # StoreMagics configuration # ------------------------------------------------------------------------------ # Lightweight persistence for python variables. # # Provides the %store magic. # If True, any %store-d variables will be automatically restored when IPython # starts. 
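# Illustrative sketch (not one of the generated defaults): with the %store
# magic provided by this extension, variables saved via ``%store name`` can be
# brought back automatically in later sessions by enabling:
#
#     c.StoreMagics.autorestore = True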
# c.StoreMagics.autorestore = False jupyter_core-5.7.2/tests/dotipython_empty/profile_default/ipython_console_config.py000066400000000000000000000523071457404620400313140ustar00rootroot00000000000000# Configuration file for ipython-console. from __future__ import annotations c = get_config() # ------------------------------------------------------------------------------ # ZMQTerminalIPythonApp configuration # ------------------------------------------------------------------------------ # ZMQTerminalIPythonApp will inherit config from: TerminalIPythonApp, # BaseIPythonApplication, Application, InteractiveShellApp, IPythonConsoleApp, # ConnectionFileMixin # Should variables loaded at startup (by startup files, exec_lines, etc.) be # hidden from tools like %who? # c.ZMQTerminalIPythonApp.hide_initial_ns = True # set the heartbeat port [default: random] # c.ZMQTerminalIPythonApp.hb_port = 0 # A list of dotted module names of IPython extensions to load. # c.ZMQTerminalIPythonApp.extensions = [] # Execute the given command string. # c.ZMQTerminalIPythonApp.code_to_run = '' # Path to the ssh key to use for logging in to the ssh server. # c.ZMQTerminalIPythonApp.sshkey = '' # The date format used by logging formatters for %(asctime)s # c.ZMQTerminalIPythonApp.log_datefmt = '%Y-%m-%d %H:%M:%S' # set the control (ROUTER) port [default: random] # c.ZMQTerminalIPythonApp.control_port = 0 # Reraise exceptions encountered loading IPython extensions? # c.ZMQTerminalIPythonApp.reraise_ipython_extension_failures = False # Set the log level by value or name. # c.ZMQTerminalIPythonApp.log_level = 30 # Run the file referenced by the PYTHONSTARTUP environment variable at IPython # startup. # c.ZMQTerminalIPythonApp.exec_PYTHONSTARTUP = True # Pre-load matplotlib and numpy for interactive use, selecting a particular # matplotlib backend and loop integration. # c.ZMQTerminalIPythonApp.pylab = None # Run the module as a script. # c.ZMQTerminalIPythonApp.module_to_run = '' # Whether to display a banner upon starting IPython. # c.ZMQTerminalIPythonApp.display_banner = True # dotted module name of an IPython extension to load. # c.ZMQTerminalIPythonApp.extra_extension = '' # Create a massive crash report when IPython encounters what may be an internal # error. The default is to append a short message to the usual traceback # c.ZMQTerminalIPythonApp.verbose_crash = False # Whether to overwrite existing config files when copying # c.ZMQTerminalIPythonApp.overwrite = False # The IPython profile to use. # c.ZMQTerminalIPythonApp.profile = 'default' # If a command or file is given via the command-line, e.g. 'ipython foo.py', # start an interactive shell after executing the file or command. # c.ZMQTerminalIPythonApp.force_interact = False # List of files to run at IPython startup. # c.ZMQTerminalIPythonApp.exec_files = [] # Start IPython quickly by skipping the loading of config files. # c.ZMQTerminalIPythonApp.quick = False # The Logging format template # c.ZMQTerminalIPythonApp.log_format = '[%(name)s]%(highlevel)s %(message)s' # Whether to install the default config files into the profile dir. If a new # profile is being created, and IPython contains config files for that profile, # then they will be staged into the new directory. Otherwise, default config # files will be automatically generated. # c.ZMQTerminalIPythonApp.copy_config_files = False # set the stdin (ROUTER) port [default: random] # c.ZMQTerminalIPythonApp.stdin_port = 0 # Path to an extra config file to load. 
# # If specified, load this config file in addition to any other IPython config. # c.ZMQTerminalIPythonApp.extra_config_file = '' # lines of code to run at IPython startup. # c.ZMQTerminalIPythonApp.exec_lines = [] # Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'osx', # 'pyglet', 'qt', 'qt5', 'tk', 'wx'). # c.ZMQTerminalIPythonApp.gui = None # A file to be run # c.ZMQTerminalIPythonApp.file_to_run = '' # Configure matplotlib for interactive use with the default matplotlib backend. # c.ZMQTerminalIPythonApp.matplotlib = None # Suppress warning messages about legacy config files # c.ZMQTerminalIPythonApp.ignore_old_config = False # set the iopub (PUB) port [default: random] # c.ZMQTerminalIPythonApp.iopub_port = 0 # # c.ZMQTerminalIPythonApp.transport = 'tcp' # JSON file in which to store connection info [default: kernel-.json] # # This file will contain the IP, ports, and authentication key needed to connect # clients to this kernel. By default, this file will be created in the security # dir of the current profile, but can be specified by absolute path. # c.ZMQTerminalIPythonApp.connection_file = '' # The name of the IPython directory. This directory is used for logging # configuration (through profiles), history storage, etc. The default is usually # $HOME/.ipython. This option can also be specified through the environment # variable IPYTHONDIR. # c.ZMQTerminalIPythonApp.ipython_dir = '' # The SSH server to use to connect to the kernel. # c.ZMQTerminalIPythonApp.sshserver = '' # Set to display confirmation dialog on exit. You can always use 'exit' or # 'quit', to force a direct exit without any confirmation. # c.ZMQTerminalIPythonApp.confirm_exit = True # set the shell (ROUTER) port [default: random] # c.ZMQTerminalIPythonApp.shell_port = 0 # The name of the default kernel to start. # c.ZMQTerminalIPythonApp.kernel_name = 'python' # If true, IPython will populate the user namespace with numpy, pylab, etc. and # an ``import *`` is done from numpy and pylab, when using pylab mode. # # When False, pylab mode should not import any names into the user namespace. # c.ZMQTerminalIPythonApp.pylab_import_all = True # Connect to an already running kernel # c.ZMQTerminalIPythonApp.existing = '' # Set the kernel's IP address [default localhost]. If the IP address is # something other than localhost, then Consoles on other machines will be able # to connect to the Kernel, so be careful! # c.ZMQTerminalIPythonApp.ip = '' # ------------------------------------------------------------------------------ # ZMQTerminalInteractiveShell configuration # ------------------------------------------------------------------------------ # A subclass of TerminalInteractiveShell that uses the 0MQ kernel # ZMQTerminalInteractiveShell will inherit config from: # TerminalInteractiveShell, InteractiveShell # # c.ZMQTerminalInteractiveShell.history_length = 10000 # auto editing of files with syntax errors. # c.ZMQTerminalInteractiveShell.autoedit_syntax = False # If True, anything that would be passed to the pager will be displayed as # regular output instead. # c.ZMQTerminalInteractiveShell.display_page = False # # c.ZMQTerminalInteractiveShell.debug = False # 'all', 'last', 'last_expr' or 'none', specifying which nodes should be run # interactively (displaying output from expressions). # c.ZMQTerminalInteractiveShell.ast_node_interactivity = 'last_expr' # Start logging to the default log file in overwrite mode. Use `logappend` to # specify a log file to **append** logs to. 
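# Illustrative sketch (not one of the generated defaults): to keep a persistent
# console log across sessions, append to a named file rather than overwriting
# the default log (the filename below is a placeholder):
#
#     c.ZMQTerminalInteractiveShell.logappend = 'console_session.log'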
# c.ZMQTerminalInteractiveShell.logstart = False # Set the size of the output cache. The default is 1000, you can change it # permanently in your config file. Setting it to 0 completely disables the # caching system, and the minimum value accepted is 20 (if you provide a value # less than 20, it is reset to 0 and a warning is issued). This limit is # defined because otherwise you'll spend more time re-flushing a too small cache # than working # c.ZMQTerminalInteractiveShell.cache_size = 1000 # The shell program to be used for paging. # c.ZMQTerminalInteractiveShell.pager = 'less' # The name of the logfile to use. # c.ZMQTerminalInteractiveShell.logfile = '' # Save multi-line entries as one entry in readline history # c.ZMQTerminalInteractiveShell.multiline_history = True # # c.ZMQTerminalInteractiveShell.readline_remove_delims = '-/~' # Enable magic commands to be called without the leading %. # c.ZMQTerminalInteractiveShell.automagic = True # Prefix to add to outputs coming from clients other than this one. # # Only relevant if include_other_output is True. # c.ZMQTerminalInteractiveShell.other_output_prefix = '[remote] ' # # c.ZMQTerminalInteractiveShell.readline_parse_and_bind = ['tab: complete', '"\\C-l": clear-screen', 'set show-all-if-ambiguous on', '"\\C-o": tab-insert', '"\\C-r": reverse-search-history', '"\\C-s": forward-search-history', '"\\C-p": history-search-backward', '"\\C-n": history-search-forward', '"\\e[A": history-search-backward', '"\\e[B": history-search-forward', '"\\C-k": kill-line', '"\\C-u": unix-line-discard'] # Use colors for displaying information about objects. Because this information # is passed through a pager (like 'less'), and some pagers get confused with # color codes, this capability can be turned off. # c.ZMQTerminalInteractiveShell.color_info = True # Callable object called via 'callable' image handler with one argument, `data`, # which is `msg["content"]["data"]` where `msg` is the message from iopub # channel. For example, you can find base64 encoded PNG data as # `data['image/png']`. # c.ZMQTerminalInteractiveShell.callable_image_handler = None # Command to invoke an image viewer program when you are using 'stream' image # handler. This option is a list of string where the first element is the # command itself and reminders are the options for the command. Raw image data # is given as STDIN to the program. # c.ZMQTerminalInteractiveShell.stream_image_handler = [] # # c.ZMQTerminalInteractiveShell.separate_out2 = '' # Autoindent IPython code entered interactively. # c.ZMQTerminalInteractiveShell.autoindent = True # The part of the banner to be printed after the profile # c.ZMQTerminalInteractiveShell.banner2 = '' # Don't call post-execute functions that have failed in the past. # c.ZMQTerminalInteractiveShell.disable_failing_post_execute = False # Deprecated, use PromptManager.out_template # c.ZMQTerminalInteractiveShell.prompt_out = 'Out[\\#]: ' # # c.ZMQTerminalInteractiveShell.object_info_string_level = 0 # # c.ZMQTerminalInteractiveShell.separate_out = '' # Automatically call the pdb debugger after every exception. # c.ZMQTerminalInteractiveShell.pdb = False # Deprecated, use PromptManager.in_template # c.ZMQTerminalInteractiveShell.prompt_in1 = 'In [\\#]: ' # # c.ZMQTerminalInteractiveShell.separate_in = '\n' # # c.ZMQTerminalInteractiveShell.wildcards_case_sensitive = True # Enable auto setting the terminal title. # c.ZMQTerminalInteractiveShell.term_title = False # Enable deep (recursive) reloading by default. 
IPython can use the deep_reload # module which reloads changes in modules recursively (it replaces the reload() # function, so you don't need to change anything to use it). deep_reload() # forces a full reload of modules whose code may have changed, which the default # reload() function does not. When deep_reload is off, IPython will use the # normal reload(), but deep_reload will still be available as dreload(). # c.ZMQTerminalInteractiveShell.deep_reload = False # Deprecated, use PromptManager.in2_template # c.ZMQTerminalInteractiveShell.prompt_in2 = ' .\\D.: ' # Whether to include output from clients other than this one sharing the same # kernel. # # Outputs are not displayed until enter is pressed. # c.ZMQTerminalInteractiveShell.include_other_output = False # Preferred object representation MIME type in order. First matched MIME type # will be used. # c.ZMQTerminalInteractiveShell.mime_preference = ['image/png', 'image/jpeg', 'image/svg+xml'] # # c.ZMQTerminalInteractiveShell.readline_use = True # Make IPython automatically call any callable object even if you didn't type # explicit parentheses. For example, 'str 43' becomes 'str(43)' automatically. # The value can be '0' to disable the feature, '1' for 'smart' autocall, where # it is not applied if there are no more arguments on the line, and '2' for # 'full' autocall, where all callable objects are automatically called (even if # no arguments are present). # c.ZMQTerminalInteractiveShell.autocall = 0 # The part of the banner to be printed before the profile # c.ZMQTerminalInteractiveShell.banner1 = 'Python 3.4.3 |Continuum Analytics, Inc.| (default, Mar 6 2015, 12:07:41) \nType "copyright", "credits" or "license" for more information.\n\nIPython 3.1.0 -- An enhanced Interactive Python.\nAnaconda is brought to you by Continuum Analytics.\nPlease check out: http://continuum.io/thanks and https://binstar.org\n? -> Introduction and overview of IPython\'s features.\n%quickref -> Quick reference.\nhelp -> Python\'s own help system.\nobject? -> Details about \'object\', use \'object??\' for extra details.\n' # Handler for image type output. This is useful, for example, when connecting # to the kernel in which pylab inline backend is activated. There are four # handlers defined. 'PIL': Use Python Imaging Library to popup image; 'stream': # Use an external program to show the image. Image will be fed into the STDIN # of the program. You will need to configure `stream_image_handler`; # 'tempfile': Use an external program to show the image. Image will be saved in # a temporally file and the program is called with the temporally file. You # will need to configure `tempfile_image_handler`; 'callable': You can set any # Python callable which is called with the image data. You will need to # configure `callable_image_handler`. # c.ZMQTerminalInteractiveShell.image_handler = None # Set the color scheme (NoColor, Linux, or LightBG). # c.ZMQTerminalInteractiveShell.colors = 'LightBG' # Set the editor used by IPython (default to $EDITOR/vi/notepad). # c.ZMQTerminalInteractiveShell.editor = 'mate -w' # Show rewritten input, e.g. for autocall. # c.ZMQTerminalInteractiveShell.show_rewritten_input = True # # c.ZMQTerminalInteractiveShell.xmode = 'Context' # # c.ZMQTerminalInteractiveShell.quiet = False # A list of ast.NodeTransformer subclass instances, which will be applied to # user input before code is run. 
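# Illustrative sketch (not one of the generated defaults): entries in this list
# must be *instances* of ast.NodeTransformer subclasses. A do-nothing
# transformer (hypothetical name) registered from this config file would be:
#
#     import ast
#
#     class PassThrough(ast.NodeTransformer):
#         """Return parsed input unchanged."""
#
#     c.ZMQTerminalInteractiveShell.ast_transformers = [PassThrough()]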
# c.ZMQTerminalInteractiveShell.ast_transformers = [] # # c.ZMQTerminalInteractiveShell.ipython_dir = '' # Set to confirm when you try to exit IPython with an EOF (Control-D in Unix, # Control-Z/Enter in Windows). By typing 'exit' or 'quit', you can force a # direct exit without any confirmation. # c.ZMQTerminalInteractiveShell.confirm_exit = True # Deprecated, use PromptManager.justify # c.ZMQTerminalInteractiveShell.prompts_pad_left = True # Timeout for giving up on a kernel (in seconds). # # On first connect and restart, the console tests whether the kernel is running # and responsive by sending kernel_info_requests. This sets the timeout in # seconds for how long the kernel can take before being presumed dead. # c.ZMQTerminalInteractiveShell.kernel_timeout = 60 # Number of lines of your screen, used to control printing of very long strings. # Strings longer than this number of lines will be sent through a pager instead # of directly printed. The default value for this is 0, which means IPython # will auto-detect your screen size every time it needs to print certain # potentially long strings (this doesn't change the behavior of the 'print' # keyword, it's only triggered internally). If for some reason this isn't # working well (it needs curses support), specify it yourself. Otherwise don't # change the default. # c.ZMQTerminalInteractiveShell.screen_length = 0 # Start logging to the given file in append mode. Use `logfile` to specify a log # file to **overwrite** logs to. # c.ZMQTerminalInteractiveShell.logappend = '' # Command to invoke an image viewer program when you are using 'tempfile' image # handler. This option is a list of string where the first element is the # command itself and reminders are the options for the command. You can use # {file} and {format} in the string to represent the location of the generated # image file and image format. # c.ZMQTerminalInteractiveShell.tempfile_image_handler = [] # ------------------------------------------------------------------------------ # KernelManager configuration # ------------------------------------------------------------------------------ # Manages a single kernel in a subprocess on this host. # # This version starts kernels with Popen. # KernelManager will inherit config from: ConnectionFileMixin # set the heartbeat port [default: random] # c.KernelManager.hb_port = 0 # set the stdin (ROUTER) port [default: random] # c.KernelManager.stdin_port = 0 # # c.KernelManager.transport = 'tcp' # JSON file in which to store connection info [default: kernel-.json] # # This file will contain the IP, ports, and authentication key needed to connect # clients to this kernel. By default, this file will be created in the security # dir of the current profile, but can be specified by absolute path. # c.KernelManager.connection_file = '' # set the control (ROUTER) port [default: random] # c.KernelManager.control_port = 0 # set the shell (ROUTER) port [default: random] # c.KernelManager.shell_port = 0 # Should we autorestart the kernel if it dies. # c.KernelManager.autorestart = False # DEPRECATED: Use kernel_name instead. # # The Popen Command to launch the kernel. Override this if you have a custom # kernel. If kernel_cmd is specified in a configuration file, IPython does not # pass any arguments to the kernel, because it cannot make any assumptions about # the arguments that the kernel understands. In particular, this means that the # kernel does not receive the option --debug if it given on the IPython command # line. 
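# Illustrative sketch (not one of the generated defaults): kernel_cmd is
# deprecated in favour of kernel_name; a more common adjustment is restarting
# kernels that die unexpectedly:
#
#     c.KernelManager.autorestart = True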
# c.KernelManager.kernel_cmd = [] # Set the kernel's IP address [default localhost]. If the IP address is # something other than localhost, then Consoles on other machines will be able # to connect to the Kernel, so be careful! # c.KernelManager.ip = '' # set the iopub (PUB) port [default: random] # c.KernelManager.iopub_port = 0 # ------------------------------------------------------------------------------ # ProfileDir configuration # ------------------------------------------------------------------------------ # An object to manage the profile directory and its resources. # # The profile directory is used by all IPython applications, to manage # configuration, logging and security. # # This object knows how to find, create and manage these directories. This # should be used by any code that wants to handle profiles. # Set the profile location directly. This overrides the logic used by the # `profile` option. # c.ProfileDir.location = '' # ------------------------------------------------------------------------------ # Session configuration # ------------------------------------------------------------------------------ # Object for handling serialization and sending of messages. # # The Session object handles building messages and sending them with ZMQ sockets # or ZMQStream objects. Objects can communicate with each other over the # network via Session objects, and only need to work with the dict-based IPython # message spec. The Session will handle serialization/deserialization, security, # and metadata. # # Sessions support configurable serialization via packer/unpacker traits, and # signing with HMAC digests via the key/keyfile traits. # # Parameters ---------- # # debug : bool # whether to trigger extra debugging statements # packer/unpacker : str : 'json', 'pickle' or import_string # importstrings for methods to serialize message parts. If just # 'json' or 'pickle', predefined JSON and pickle packers will be used. # Otherwise, the entire importstring must be used. # # The functions must accept at least valid JSON input, and output *bytes*. # # For example, to use msgpack: # packer = 'msgpack.packb', unpacker='msgpack.unpackb' # pack/unpack : callables # You can also set the pack/unpack callables for serialization directly. # session : bytes # the ID of this Session object. The default is to generate a new UUID. # username : unicode # username added to message headers. The default is to ask the OS. # key : bytes # The key used to initialize an HMAC signature. If unset, messages # will not be signed or checked. # keyfile : filepath # The file containing a key. If this is set, `key` will be initialized # to the contents of the file. # The digest scheme used to construct the message signatures. Must have the form # 'hmac-HASH'. # c.Session.signature_scheme = 'hmac-sha256' # The maximum number of digests to remember. # # The digest history will be culled when it exceeds this value. # c.Session.digest_history_size = 65536 # The name of the unpacker for unserializing messages. Only used with custom # functions for `packer`. # c.Session.unpacker = 'json' # The name of the packer for serializing messages. Should be one of 'json', # 'pickle', or an import name for a custom callable serializer. # c.Session.packer = 'json' # Username for the Session. Default is your system username. # c.Session.username = 'minrk' # Debug output in the Session # c.Session.debug = False # path to file containing execution key. 
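# Illustrative sketch (not one of the generated defaults): as described in the
# Session docstring above, messages can be signed with a key read from a file
# and serialized with msgpack instead of JSON (the path is a placeholder and
# the msgpack package is assumed to be installed):
#
#     c.Session.keyfile = '/path/to/session_key'
#     c.Session.packer = 'msgpack.packb'
#     c.Session.unpacker = 'msgpack.unpackb'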
# c.Session.keyfile = '' # The maximum number of items for a container to be introspected for custom # serialization. Containers larger than this are pickled outright. # c.Session.item_threshold = 64 # Threshold (in bytes) beyond which an object's buffer should be extracted to # avoid pickling. # c.Session.buffer_threshold = 1024 # The UUID identifying this session. # c.Session.session = '' # Threshold (in bytes) beyond which a buffer should be sent without copying. # c.Session.copy_threshold = 65536 # execution key, for signing messages. # c.Session.key = b'' # Metadata dictionary, which serves as the default top-level metadata dict for # each message. # c.Session.metadata = {} jupyter_core-5.7.2/tests/dotipython_empty/profile_default/ipython_kernel_config.py000066400000000000000000000360171457404620400311320ustar00rootroot00000000000000# Configuration file for ipython-kernel. from __future__ import annotations c = get_config() # ------------------------------------------------------------------------------ # IPKernelApp configuration # ------------------------------------------------------------------------------ # IPython: an enhanced interactive Python shell. # IPKernelApp will inherit config from: BaseIPythonApplication, Application, # InteractiveShellApp, ConnectionFileMixin # Should variables loaded at startup (by startup files, exec_lines, etc.) be # hidden from tools like %who? # c.IPKernelApp.hide_initial_ns = True # The importstring for the DisplayHook factory # c.IPKernelApp.displayhook_class = 'IPython.kernel.zmq.displayhook.ZMQDisplayHook' # A list of dotted module names of IPython extensions to load. # c.IPKernelApp.extensions = [] # Execute the given command string. # c.IPKernelApp.code_to_run = '' # redirect stderr to the null device # c.IPKernelApp.no_stderr = False # The date format used by logging formatters for %(asctime)s # c.IPKernelApp.log_datefmt = '%Y-%m-%d %H:%M:%S' # Whether to create profile dir if it doesn't exist # c.IPKernelApp.auto_create = False # Reraise exceptions encountered loading IPython extensions? # c.IPKernelApp.reraise_ipython_extension_failures = False # Set the log level by value or name. # c.IPKernelApp.log_level = 30 # Run the file referenced by the PYTHONSTARTUP environment variable at IPython # startup. # c.IPKernelApp.exec_PYTHONSTARTUP = True # Pre-load matplotlib and numpy for interactive use, selecting a particular # matplotlib backend and loop integration. # c.IPKernelApp.pylab = None # Run the module as a script. # c.IPKernelApp.module_to_run = '' # The importstring for the OutStream factory # c.IPKernelApp.outstream_class = 'IPython.kernel.zmq.iostream.OutStream' # dotted module name of an IPython extension to load. # c.IPKernelApp.extra_extension = '' # Create a massive crash report when IPython encounters what may be an internal # error. The default is to append a short message to the usual traceback # c.IPKernelApp.verbose_crash = False # Whether to overwrite existing config files when copying # c.IPKernelApp.overwrite = False # The IPython profile to use. # c.IPKernelApp.profile = 'default' # List of files to run at IPython startup. # c.IPKernelApp.exec_files = [] # The Logging format template # c.IPKernelApp.log_format = '[%(name)s]%(highlevel)s %(message)s' # Whether to install the default config files into the profile dir. If a new # profile is being created, and IPython contains config files for that profile, # then they will be staged into the new directory. Otherwise, default config # files will be automatically generated. 
# c.IPKernelApp.copy_config_files = False # set the stdin (ROUTER) port [default: random] # c.IPKernelApp.stdin_port = 0 # Path to an extra config file to load. # # If specified, load this config file in addition to any other IPython config. # c.IPKernelApp.extra_config_file = '' # lines of code to run at IPython startup. # c.IPKernelApp.exec_lines = [] # set the control (ROUTER) port [default: random] # c.IPKernelApp.control_port = 0 # set the heartbeat port [default: random] # c.IPKernelApp.hb_port = 0 # Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'osx', # 'pyglet', 'qt', 'qt5', 'tk', 'wx'). # c.IPKernelApp.gui = None # A file to be run # c.IPKernelApp.file_to_run = '' # The name of the IPython directory. This directory is used for logging # configuration (through profiles), history storage, etc. The default is usually # $HOME/.ipython. This option can also be specified through the environment # variable IPYTHONDIR. # c.IPKernelApp.ipython_dir = '' # kill this process if its parent dies. On Windows, the argument specifies the # HANDLE of the parent process, otherwise it is simply boolean. # c.IPKernelApp.parent_handle = 0 # Configure matplotlib for interactive use with the default matplotlib backend. # c.IPKernelApp.matplotlib = None # set the iopub (PUB) port [default: random] # c.IPKernelApp.iopub_port = 0 # redirect stdout to the null device # c.IPKernelApp.no_stdout = False # # c.IPKernelApp.transport = 'tcp' # JSON file in which to store connection info [default: kernel-.json] # # This file will contain the IP, ports, and authentication key needed to connect # clients to this kernel. By default, this file will be created in the security # dir of the current profile, but can be specified by absolute path. # c.IPKernelApp.connection_file = '' # The Kernel subclass to be used. # # This should allow easy re-use of the IPKernelApp entry point to configure and # launch kernels other than IPython's own. # c.IPKernelApp.kernel_class = # ONLY USED ON WINDOWS Interrupt this process when the parent is signaled. # c.IPKernelApp.interrupt = 0 # set the shell (ROUTER) port [default: random] # c.IPKernelApp.shell_port = 0 # If true, IPython will populate the user namespace with numpy, pylab, etc. and # an ``import *`` is done from numpy and pylab, when using pylab mode. # # When False, pylab mode should not import any names into the user namespace. # c.IPKernelApp.pylab_import_all = True # Set the kernel's IP address [default localhost]. If the IP address is # something other than localhost, then Consoles on other machines will be able # to connect to the Kernel, so be careful! # c.IPKernelApp.ip = '' # ------------------------------------------------------------------------------ # IPythonKernel configuration # ------------------------------------------------------------------------------ # IPythonKernel will inherit config from: Kernel # # c.IPythonKernel._execute_sleep = 0.0005 # Whether to use appnope for compatibility with OS X App Nap. # # Only affects OS X >= 10.9. # c.IPythonKernel._darwin_app_nap = True # # c.IPythonKernel._poll_interval = 0.05 # ------------------------------------------------------------------------------ # ZMQInteractiveShell configuration # ------------------------------------------------------------------------------ # A subclass of InteractiveShell for ZMQ. 
# ZMQInteractiveShell will inherit config from: InteractiveShell # # c.ZMQInteractiveShell.object_info_string_level = 0 # # c.ZMQInteractiveShell.separate_out = '' # Automatically call the pdb debugger after every exception. # c.ZMQInteractiveShell.pdb = False # # c.ZMQInteractiveShell.ipython_dir = '' # # c.ZMQInteractiveShell.history_length = 10000 # # c.ZMQInteractiveShell.readline_remove_delims = '-/~' # If True, anything that would be passed to the pager will be displayed as # regular output instead. # c.ZMQInteractiveShell.display_page = False # Deprecated, use PromptManager.in2_template # c.ZMQInteractiveShell.prompt_in2 = ' .\\D.: ' # # c.ZMQInteractiveShell.separate_in = '\n' # Start logging to the default log file in overwrite mode. Use `logappend` to # specify a log file to **append** logs to. # c.ZMQInteractiveShell.logstart = False # Set the size of the output cache. The default is 1000, you can change it # permanently in your config file. Setting it to 0 completely disables the # caching system, and the minimum value accepted is 20 (if you provide a value # less than 20, it is reset to 0 and a warning is issued). This limit is # defined because otherwise you'll spend more time re-flushing a too small cache # than working # c.ZMQInteractiveShell.cache_size = 1000 # # c.ZMQInteractiveShell.wildcards_case_sensitive = True # The name of the logfile to use. # c.ZMQInteractiveShell.logfile = '' # 'all', 'last', 'last_expr' or 'none', specifying which nodes should be run # interactively (displaying output from expressions). # c.ZMQInteractiveShell.ast_node_interactivity = 'last_expr' # # c.ZMQInteractiveShell.debug = False # # c.ZMQInteractiveShell.quiet = False # Save multi-line entries as one entry in readline history # c.ZMQInteractiveShell.multiline_history = True # Deprecated, use PromptManager.in_template # c.ZMQInteractiveShell.prompt_in1 = 'In [\\#]: ' # Enable magic commands to be called without the leading %. # c.ZMQInteractiveShell.automagic = True # The part of the banner to be printed before the profile # c.ZMQInteractiveShell.banner1 = 'Python 3.4.3 |Continuum Analytics, Inc.| (default, Mar 6 2015, 12:07:41) \nType "copyright", "credits" or "license" for more information.\n\nIPython 3.1.0 -- An enhanced Interactive Python.\nAnaconda is brought to you by Continuum Analytics.\nPlease check out: http://continuum.io/thanks and https://binstar.org\n? -> Introduction and overview of IPython\'s features.\n%quickref -> Quick reference.\nhelp -> Python\'s own help system.\nobject? -> Details about \'object\', use \'object??\' for extra details.\n' # Make IPython automatically call any callable object even if you didn't type # explicit parentheses. For example, 'str 43' becomes 'str(43)' automatically. # The value can be '0' to disable the feature, '1' for 'smart' autocall, where # it is not applied if there are no more arguments on the line, and '2' for # 'full' autocall, where all callable objects are automatically called (even if # no arguments are present). # c.ZMQInteractiveShell.autocall = 0 # # c.ZMQInteractiveShell.readline_parse_and_bind = ['tab: complete', '"\\C-l": clear-screen', 'set show-all-if-ambiguous on', '"\\C-o": tab-insert', '"\\C-r": reverse-search-history', '"\\C-s": forward-search-history', '"\\C-p": history-search-backward', '"\\C-n": history-search-forward', '"\\e[A": history-search-backward', '"\\e[B": history-search-forward', '"\\C-k": kill-line', '"\\C-u": unix-line-discard'] # Set the color scheme (NoColor, Linux, or LightBG). 
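# Illustrative example (not one of the generated defaults): on a dark terminal
# background the 'Linux' scheme is usually the more readable choice:
#
#     c.ZMQInteractiveShell.colors = 'Linux'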
# c.ZMQInteractiveShell.colors = 'LightBG' # Use colors for displaying information about objects. Because this information # is passed through a pager (like 'less'), and some pagers get confused with # color codes, this capability can be turned off. # c.ZMQInteractiveShell.color_info = True # Show rewritten input, e.g. for autocall. # c.ZMQInteractiveShell.show_rewritten_input = True # # c.ZMQInteractiveShell.xmode = 'Context' # # c.ZMQInteractiveShell.separate_out2 = '' # The part of the banner to be printed after the profile # c.ZMQInteractiveShell.banner2 = '' # Start logging to the given file in append mode. Use `logfile` to specify a log # file to **overwrite** logs to. # c.ZMQInteractiveShell.logappend = '' # Don't call post-execute functions that have failed in the past. # c.ZMQInteractiveShell.disable_failing_post_execute = False # Deprecated, use PromptManager.out_template # c.ZMQInteractiveShell.prompt_out = 'Out[\\#]: ' # Enable deep (recursive) reloading by default. IPython can use the deep_reload # module which reloads changes in modules recursively (it replaces the reload() # function, so you don't need to change anything to use it). deep_reload() # forces a full reload of modules whose code may have changed, which the default # reload() function does not. When deep_reload is off, IPython will use the # normal reload(), but deep_reload will still be available as dreload(). # c.ZMQInteractiveShell.deep_reload = False # Deprecated, use PromptManager.justify # c.ZMQInteractiveShell.prompts_pad_left = True # A list of ast.NodeTransformer subclass instances, which will be applied to # user input before code is run. # c.ZMQInteractiveShell.ast_transformers = [] # ------------------------------------------------------------------------------ # ProfileDir configuration # ------------------------------------------------------------------------------ # An object to manage the profile directory and its resources. # # The profile directory is used by all IPython applications, to manage # configuration, logging and security. # # This object knows how to find, create and manage these directories. This # should be used by any code that wants to handle profiles. # Set the profile location directly. This overrides the logic used by the # `profile` option. # c.ProfileDir.location = '' # ------------------------------------------------------------------------------ # Session configuration # ------------------------------------------------------------------------------ # Object for handling serialization and sending of messages. # # The Session object handles building messages and sending them with ZMQ sockets # or ZMQStream objects. Objects can communicate with each other over the # network via Session objects, and only need to work with the dict-based IPython # message spec. The Session will handle serialization/deserialization, security, # and metadata. # # Sessions support configurable serialization via packer/unpacker traits, and # signing with HMAC digests via the key/keyfile traits. # # Parameters ---------- # # debug : bool # whether to trigger extra debugging statements # packer/unpacker : str : 'json', 'pickle' or import_string # importstrings for methods to serialize message parts. If just # 'json' or 'pickle', predefined JSON and pickle packers will be used. # Otherwise, the entire importstring must be used. # # The functions must accept at least valid JSON input, and output *bytes*. 
# # For example, to use msgpack: # packer = 'msgpack.packb', unpacker='msgpack.unpackb' # pack/unpack : callables # You can also set the pack/unpack callables for serialization directly. # session : bytes # the ID of this Session object. The default is to generate a new UUID. # username : unicode # username added to message headers. The default is to ask the OS. # key : bytes # The key used to initialize an HMAC signature. If unset, messages # will not be signed or checked. # keyfile : filepath # The file containing a key. If this is set, `key` will be initialized # to the contents of the file. # The digest scheme used to construct the message signatures. Must have the form # 'hmac-HASH'. # c.Session.signature_scheme = 'hmac-sha256' # The maximum number of digests to remember. # # The digest history will be culled when it exceeds this value. # c.Session.digest_history_size = 65536 # The name of the unpacker for unserializing messages. Only used with custom # functions for `packer`. # c.Session.unpacker = 'json' # The name of the packer for serializing messages. Should be one of 'json', # 'pickle', or an import name for a custom callable serializer. # c.Session.packer = 'json' # Username for the Session. Default is your system username. # c.Session.username = 'minrk' # Debug output in the Session # c.Session.debug = False # path to file containing execution key. # c.Session.keyfile = '' # The maximum number of items for a container to be introspected for custom # serialization. Containers larger than this are pickled outright. # c.Session.item_threshold = 64 # Threshold (in bytes) beyond which an object's buffer should be extracted to # avoid pickling. # c.Session.buffer_threshold = 1024 # The UUID identifying this session. # c.Session.session = '' # Threshold (in bytes) beyond which a buffer should be sent without copying. # c.Session.copy_threshold = 65536 # execution key, for signing messages. # c.Session.key = b'' # Metadata dictionary, which serves as the default top-level metadata dict for # each message. # c.Session.metadata = {} jupyter_core-5.7.2/tests/dotipython_empty/profile_default/ipython_nbconvert_config.py000066400000000000000000001135661457404620400316570ustar00rootroot00000000000000# Configuration file for ipython-nbconvert. from __future__ import annotations c = get_config() # ------------------------------------------------------------------------------ # NbConvertApp configuration # ------------------------------------------------------------------------------ # This application is used to convert notebook files (*.ipynb) to various other # formats. # # WARNING: THE COMMANDLINE INTERFACE MAY CHANGE IN FUTURE RELEASES. # NbConvertApp will inherit config from: BaseIPythonApplication, Application # List of notebooks to convert. Wildcards are supported. Filenames passed # positionally will be added to the list. # c.NbConvertApp.notebooks = [] # The IPython profile to use. # c.NbConvertApp.profile = 'default' # The export format to be used. # c.NbConvertApp.export_format = 'html' # The date format used by logging formatters for %(asctime)s # c.NbConvertApp.log_datefmt = '%Y-%m-%d %H:%M:%S' # overwrite base name use for output files. can only be used when converting one # notebook at a time. # c.NbConvertApp.output_base = '' # Create a massive crash report when IPython encounters what may be an internal # error. The default is to append a short message to the usual traceback # c.NbConvertApp.verbose_crash = False # Path to an extra config file to load. 
# # If specified, load this config file in addition to any other IPython config. # c.NbConvertApp.extra_config_file = '' # Writer class used to write the results of the conversion # c.NbConvertApp.writer_class = 'FilesWriter' # PostProcessor class used to write the results of the conversion # c.NbConvertApp.postprocessor_class = '' # Set the log level by value or name. # c.NbConvertApp.log_level = 30 # The name of the IPython directory. This directory is used for logging # configuration (through profiles), history storage, etc. The default is usually # $HOME/.ipython. This option can also be specified through the environment # variable IPYTHONDIR. # c.NbConvertApp.ipython_dir = '' # Whether to create profile dir if it doesn't exist # c.NbConvertApp.auto_create = False # Whether to overwrite existing config files when copying # c.NbConvertApp.overwrite = False # Whether to apply a suffix prior to the extension (only relevant when # converting to notebook format). The suffix is determined by the exporter, and # is usually '.nbconvert'. # c.NbConvertApp.use_output_suffix = True # The Logging format template # c.NbConvertApp.log_format = '[%(name)s]%(highlevel)s %(message)s' # Whether to install the default config files into the profile dir. If a new # profile is being created, and IPython contains config files for that profile, # then they will be staged into the new directory. Otherwise, default config # files will be automatically generated. # c.NbConvertApp.copy_config_files = False # ------------------------------------------------------------------------------ # NbConvertBase configuration # ------------------------------------------------------------------------------ # Global configurable class for shared config # # Useful for display data priority that might be use by many transformers # DEPRECATED default highlight language, please use language_info metadata # instead # c.NbConvertBase.default_language = 'ipython' # An ordered list of preferred output type, the first encountered will usually # be used when converting discarding the others. # c.NbConvertBase.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain'] # ------------------------------------------------------------------------------ # ProfileDir configuration # ------------------------------------------------------------------------------ # An object to manage the profile directory and its resources. # # The profile directory is used by all IPython applications, to manage # configuration, logging and security. # # This object knows how to find, create and manage these directories. This # should be used by any code that wants to handle profiles. # Set the profile location directly. This overrides the logic used by the # `profile` option. # c.ProfileDir.location = '' # ------------------------------------------------------------------------------ # Exporter configuration # ------------------------------------------------------------------------------ # Class containing methods that sequentially run a list of preprocessors on a # NotebookNode object and then return the modified NotebookNode object and # accompanying resources dict. # List of preprocessors, by name or namespace, to enable. # c.Exporter.preprocessors = [] # List of preprocessors available by default, by name, namespace, instance, or # type. 
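# Illustrative sketch (not one of the generated defaults): preprocessors can be
# enabled by dotted name, e.g. to strip cell output before exporting:
#
#     c.Exporter.preprocessors = ['IPython.nbconvert.preprocessors.ClearOutputPreprocessor']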
# c.Exporter.default_preprocessors = ['IPython.nbconvert.preprocessors.coalesce_streams', 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor', 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor', 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor', 'IPython.nbconvert.preprocessors.LatexPreprocessor', 'IPython.nbconvert.preprocessors.ClearOutputPreprocessor', 'IPython.nbconvert.preprocessors.ExecutePreprocessor', 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor'] # Extension of the file that should be written to disk # c.Exporter.file_extension = '.txt' # ------------------------------------------------------------------------------ # HTMLExporter configuration # ------------------------------------------------------------------------------ # Exports a basic HTML document. This exporter assists with the export of HTML. # Inherit from it if you are writing your own HTML template and need custom # preprocessors/filters. If you don't need custom preprocessors/ filters, just # change the 'template_file' config option. # HTMLExporter will inherit config from: TemplateExporter, Exporter # # c.HTMLExporter.jinja_logic_block_end = '' # List of preprocessors available by default, by name, namespace, instance, or # type. # c.HTMLExporter.default_preprocessors = ['IPython.nbconvert.preprocessors.coalesce_streams', 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor', 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor', 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor', 'IPython.nbconvert.preprocessors.LatexPreprocessor', 'IPython.nbconvert.preprocessors.ClearOutputPreprocessor', 'IPython.nbconvert.preprocessors.ExecutePreprocessor', 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor'] # # c.HTMLExporter.jinja_comment_block_start = '' # Dictionary of filters, by name and namespace, to add to the Jinja environment. # c.HTMLExporter.filters = {} # List of preprocessors, by name or namespace, to enable. # c.HTMLExporter.preprocessors = [] # Name of the template file to use # c.HTMLExporter.template_file = 'default' # # c.HTMLExporter.template_extension = '.tpl' # # c.HTMLExporter.jinja_logic_block_start = '' # # c.HTMLExporter.jinja_variable_block_start = '' # # c.HTMLExporter.template_path = ['.'] # # c.HTMLExporter.jinja_comment_block_end = '' # # c.HTMLExporter.jinja_variable_block_end = '' # Extension of the file that should be written to disk # c.HTMLExporter.file_extension = '.txt' # formats of raw cells to be included in this Exporter's output. # c.HTMLExporter.raw_mimetypes = [] # ------------------------------------------------------------------------------ # LatexExporter configuration # ------------------------------------------------------------------------------ # Exports to a Latex template. Inherit from this class if your template is # LaTeX based and you need custom transformers/filters. Inherit from it if you # are writing your own HTML template and need custom transformers/filters. If # you don't need custom transformers/filters, just change the 'template_file' # config option. Place your template in the special "/latex" subfolder of the # "../templates" folder. # LatexExporter will inherit config from: TemplateExporter, Exporter # # c.LatexExporter.jinja_logic_block_end = '*))' # List of preprocessors available by default, by name, namespace, instance, or # type. 
# c.LatexExporter.default_preprocessors = ['IPython.nbconvert.preprocessors.coalesce_streams', 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor', 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor', 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor', 'IPython.nbconvert.preprocessors.LatexPreprocessor', 'IPython.nbconvert.preprocessors.ClearOutputPreprocessor', 'IPython.nbconvert.preprocessors.ExecutePreprocessor', 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor'] # # c.LatexExporter.jinja_comment_block_start = '((=' # Dictionary of filters, by name and namespace, to add to the Jinja environment. # c.LatexExporter.filters = {} # List of preprocessors, by name or namespace, to enable. # c.LatexExporter.preprocessors = [] # Name of the template file to use # c.LatexExporter.template_file = 'default' # # c.LatexExporter.template_extension = '.tplx' # # c.LatexExporter.jinja_logic_block_start = '((*' # # c.LatexExporter.jinja_variable_block_start = '(((' # # c.LatexExporter.template_path = ['.'] # # c.LatexExporter.jinja_comment_block_end = '=))' # # c.LatexExporter.jinja_variable_block_end = ')))' # Extension of the file that should be written to disk # c.LatexExporter.file_extension = '.txt' # formats of raw cells to be included in this Exporter's output. # c.LatexExporter.raw_mimetypes = [] # ------------------------------------------------------------------------------ # MarkdownExporter configuration # ------------------------------------------------------------------------------ # Exports to a markdown document (.md) # MarkdownExporter will inherit config from: TemplateExporter, Exporter # # c.MarkdownExporter.jinja_logic_block_end = '' # List of preprocessors available by default, by name, namespace, instance, or # type. # c.MarkdownExporter.default_preprocessors = ['IPython.nbconvert.preprocessors.coalesce_streams', 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor', 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor', 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor', 'IPython.nbconvert.preprocessors.LatexPreprocessor', 'IPython.nbconvert.preprocessors.ClearOutputPreprocessor', 'IPython.nbconvert.preprocessors.ExecutePreprocessor', 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor'] # # c.MarkdownExporter.jinja_comment_block_start = '' # Dictionary of filters, by name and namespace, to add to the Jinja environment. # c.MarkdownExporter.filters = {} # List of preprocessors, by name or namespace, to enable. # c.MarkdownExporter.preprocessors = [] # Name of the template file to use # c.MarkdownExporter.template_file = 'default' # # c.MarkdownExporter.template_extension = '.tpl' # # c.MarkdownExporter.jinja_logic_block_start = '' # # c.MarkdownExporter.jinja_variable_block_start = '' # # c.MarkdownExporter.template_path = ['.'] # # c.MarkdownExporter.jinja_comment_block_end = '' # # c.MarkdownExporter.jinja_variable_block_end = '' # Extension of the file that should be written to disk # c.MarkdownExporter.file_extension = '.txt' # formats of raw cells to be included in this Exporter's output. # c.MarkdownExporter.raw_mimetypes = [] # ------------------------------------------------------------------------------ # NotebookExporter configuration # ------------------------------------------------------------------------------ # Exports to an IPython notebook. 
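# For example (an illustrative setting, not a generated default), a v4
# notebook could be downgraded to nbformat 3 on export by uncommenting the
# `nbformat_version` option documented further down and setting:
# c.NotebookExporter.nbformat_version = 3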
# NotebookExporter will inherit config from: Exporter # List of preprocessors, by name or namespace, to enable. # c.NotebookExporter.preprocessors = [] # List of preprocessors available by default, by name, namespace, instance, or # type. # c.NotebookExporter.default_preprocessors = ['IPython.nbconvert.preprocessors.coalesce_streams', 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor', 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor', 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor', 'IPython.nbconvert.preprocessors.LatexPreprocessor', 'IPython.nbconvert.preprocessors.ClearOutputPreprocessor', 'IPython.nbconvert.preprocessors.ExecutePreprocessor', 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor'] # Extension of the file that should be written to disk # c.NotebookExporter.file_extension = '.txt' # The nbformat version to write. Use this to downgrade notebooks. # c.NotebookExporter.nbformat_version = 4 # ------------------------------------------------------------------------------ # PDFExporter configuration # ------------------------------------------------------------------------------ # Writer designed to write to PDF files # PDFExporter will inherit config from: LatexExporter, TemplateExporter, # Exporter # # c.PDFExporter.jinja_logic_block_end = '*))' # How many times latex will be called. # c.PDFExporter.latex_count = 3 # List of preprocessors available by default, by name, namespace, instance, or # type. # c.PDFExporter.default_preprocessors = ['IPython.nbconvert.preprocessors.coalesce_streams', 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor', 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor', 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor', 'IPython.nbconvert.preprocessors.LatexPreprocessor', 'IPython.nbconvert.preprocessors.ClearOutputPreprocessor', 'IPython.nbconvert.preprocessors.ExecutePreprocessor', 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor'] # # c.PDFExporter.jinja_comment_block_start = '((=' # Dictionary of filters, by name and namespace, to add to the Jinja environment. # c.PDFExporter.filters = {} # List of preprocessors, by name or namespace, to enable. # c.PDFExporter.preprocessors = [] # Name of the template file to use # c.PDFExporter.template_file = 'default' # # c.PDFExporter.template_extension = '.tplx' # Whether to display the output of latex commands. # c.PDFExporter.verbose = False # # c.PDFExporter.jinja_logic_block_start = '((*' # Shell command used to compile latex. # c.PDFExporter.latex_command = ['pdflatex', '{filename}'] # # c.PDFExporter.jinja_variable_block_start = '(((' # # c.PDFExporter.template_path = ['.'] # Shell command used to run bibtex. # c.PDFExporter.bib_command = ['bibtex', '{filename}'] # # c.PDFExporter.jinja_comment_block_end = '=))' # File extensions of temp files to remove after running. # c.PDFExporter.temp_file_exts = ['.aux', '.bbl', '.blg', '.idx', '.log', '.out'] # # c.PDFExporter.jinja_variable_block_end = ')))' # Extension of the file that should be written to disk # c.PDFExporter.file_extension = '.txt' # formats of raw cells to be included in this Exporter's output. # c.PDFExporter.raw_mimetypes = [] # ------------------------------------------------------------------------------ # PythonExporter configuration # ------------------------------------------------------------------------------ # Exports a Python code file. 
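# For example (illustrative; `mynotebook.ipynb` is a placeholder filename),
# this is the exporter behind the command line invocation:
#
#     ipython nbconvert --to python mynotebook.ipynb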
# PythonExporter will inherit config from: TemplateExporter, Exporter # # c.PythonExporter.jinja_logic_block_end = '' # List of preprocessors available by default, by name, namespace, instance, or # type. # c.PythonExporter.default_preprocessors = ['IPython.nbconvert.preprocessors.coalesce_streams', 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor', 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor', 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor', 'IPython.nbconvert.preprocessors.LatexPreprocessor', 'IPython.nbconvert.preprocessors.ClearOutputPreprocessor', 'IPython.nbconvert.preprocessors.ExecutePreprocessor', 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor'] # # c.PythonExporter.jinja_comment_block_start = '' # Dictionary of filters, by name and namespace, to add to the Jinja environment. # c.PythonExporter.filters = {} # List of preprocessors, by name or namespace, to enable. # c.PythonExporter.preprocessors = [] # Name of the template file to use # c.PythonExporter.template_file = 'default' # # c.PythonExporter.template_extension = '.tpl' # # c.PythonExporter.jinja_logic_block_start = '' # # c.PythonExporter.jinja_variable_block_start = '' # # c.PythonExporter.template_path = ['.'] # # c.PythonExporter.jinja_comment_block_end = '' # # c.PythonExporter.jinja_variable_block_end = '' # Extension of the file that should be written to disk # c.PythonExporter.file_extension = '.txt' # formats of raw cells to be included in this Exporter's output. # c.PythonExporter.raw_mimetypes = [] # ------------------------------------------------------------------------------ # RSTExporter configuration # ------------------------------------------------------------------------------ # Exports restructured text documents. # RSTExporter will inherit config from: TemplateExporter, Exporter # # c.RSTExporter.jinja_logic_block_end = '' # List of preprocessors available by default, by name, namespace, instance, or # type. # c.RSTExporter.default_preprocessors = ['IPython.nbconvert.preprocessors.coalesce_streams', 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor', 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor', 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor', 'IPython.nbconvert.preprocessors.LatexPreprocessor', 'IPython.nbconvert.preprocessors.ClearOutputPreprocessor', 'IPython.nbconvert.preprocessors.ExecutePreprocessor', 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor'] # # c.RSTExporter.jinja_comment_block_start = '' # Dictionary of filters, by name and namespace, to add to the Jinja environment. # c.RSTExporter.filters = {} # List of preprocessors, by name or namespace, to enable. # c.RSTExporter.preprocessors = [] # Name of the template file to use # c.RSTExporter.template_file = 'default' # # c.RSTExporter.template_extension = '.tpl' # # c.RSTExporter.jinja_logic_block_start = '' # # c.RSTExporter.jinja_variable_block_start = '' # # c.RSTExporter.template_path = ['.'] # # c.RSTExporter.jinja_comment_block_end = '' # # c.RSTExporter.jinja_variable_block_end = '' # Extension of the file that should be written to disk # c.RSTExporter.file_extension = '.txt' # formats of raw cells to be included in this Exporter's output. 
# c.RSTExporter.raw_mimetypes = [] # ------------------------------------------------------------------------------ # SlidesExporter configuration # ------------------------------------------------------------------------------ # Exports HTML slides with reveal.js # SlidesExporter will inherit config from: HTMLExporter, TemplateExporter, # Exporter # # c.SlidesExporter.jinja_logic_block_end = '' # List of preprocessors available by default, by name, namespace, instance, or # type. # c.SlidesExporter.default_preprocessors = ['IPython.nbconvert.preprocessors.coalesce_streams', 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor', 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor', 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor', 'IPython.nbconvert.preprocessors.LatexPreprocessor', 'IPython.nbconvert.preprocessors.ClearOutputPreprocessor', 'IPython.nbconvert.preprocessors.ExecutePreprocessor', 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor'] # # c.SlidesExporter.jinja_comment_block_start = '' # Dictionary of filters, by name and namespace, to add to the Jinja environment. # c.SlidesExporter.filters = {} # List of preprocessors, by name or namespace, to enable. # c.SlidesExporter.preprocessors = [] # Name of the template file to use # c.SlidesExporter.template_file = 'default' # # c.SlidesExporter.template_extension = '.tpl' # # c.SlidesExporter.jinja_logic_block_start = '' # # c.SlidesExporter.jinja_variable_block_start = '' # # c.SlidesExporter.template_path = ['.'] # # c.SlidesExporter.jinja_comment_block_end = '' # # c.SlidesExporter.jinja_variable_block_end = '' # Extension of the file that should be written to disk # c.SlidesExporter.file_extension = '.txt' # formats of raw cells to be included in this Exporter's output. # c.SlidesExporter.raw_mimetypes = [] # ------------------------------------------------------------------------------ # TemplateExporter configuration # ------------------------------------------------------------------------------ # Exports notebooks into other file formats. Uses Jinja 2 templating engine to # output new formats. Inherit from this class if you are creating a new # template type along with new filters/preprocessors. If the filters/ # preprocessors provided by default suffice, there is no need to inherit from # this class. Instead, override the template_file and file_extension traits via # a config file. # # - ascii_only - add_prompts - add_anchor - html2text - strip_ansi - # comment_lines - ansi2html - strip_files_prefix - prevent_list_blocks - # highlight2html - indent - wrap_text - markdown2rst - citation2latex - # highlight2latex - filter_data_type - get_lines - escape_latex - ipython2python # - markdown2html - strip_dollars - path2url - posix_path - ansi2latex - # markdown2latex # TemplateExporter will inherit config from: Exporter # # c.TemplateExporter.jinja_logic_block_end = '' # List of preprocessors available by default, by name, namespace, instance, or # type. 
# c.TemplateExporter.default_preprocessors = ['IPython.nbconvert.preprocessors.coalesce_streams', 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor', 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor', 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor', 'IPython.nbconvert.preprocessors.LatexPreprocessor', 'IPython.nbconvert.preprocessors.ClearOutputPreprocessor', 'IPython.nbconvert.preprocessors.ExecutePreprocessor', 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor'] # # c.TemplateExporter.jinja_comment_block_start = '' # Dictionary of filters, by name and namespace, to add to the Jinja environment. # c.TemplateExporter.filters = {} # List of preprocessors, by name or namespace, to enable. # c.TemplateExporter.preprocessors = [] # Name of the template file to use # c.TemplateExporter.template_file = 'default' # # c.TemplateExporter.template_extension = '.tpl' # # c.TemplateExporter.jinja_logic_block_start = '' # # c.TemplateExporter.jinja_variable_block_start = '' # # c.TemplateExporter.template_path = ['.'] # # c.TemplateExporter.jinja_comment_block_end = '' # # c.TemplateExporter.jinja_variable_block_end = '' # Extension of the file that should be written to disk # c.TemplateExporter.file_extension = '.txt' # formats of raw cells to be included in this Exporter's output. # c.TemplateExporter.raw_mimetypes = [] # ------------------------------------------------------------------------------ # CSSHTMLHeaderPreprocessor configuration # ------------------------------------------------------------------------------ # Preprocessor used to pre-process notebook for HTML output. Adds IPython # notebook front-end CSS and Pygments CSS to HTML output. # CSSHTMLHeaderPreprocessor will inherit config from: Preprocessor, # NbConvertBase # CSS highlight class identifier # c.CSSHTMLHeaderPreprocessor.highlight_class = '.highlight' # # c.CSSHTMLHeaderPreprocessor.enabled = False # DEPRECATED default highlight language, please use language_info metadata # instead # c.CSSHTMLHeaderPreprocessor.default_language = 'ipython' # An ordered list of preferred output type, the first encountered will usually # be used when converting discarding the others. # c.CSSHTMLHeaderPreprocessor.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain'] # ------------------------------------------------------------------------------ # ClearOutputPreprocessor configuration # ------------------------------------------------------------------------------ # Removes the output from all code cells in a notebook. # ClearOutputPreprocessor will inherit config from: Preprocessor, NbConvertBase # # c.ClearOutputPreprocessor.enabled = False # DEPRECATED default highlight language, please use language_info metadata # instead # c.ClearOutputPreprocessor.default_language = 'ipython' # An ordered list of preferred output type, the first encountered will usually # be used when converting discarding the others. # c.ClearOutputPreprocessor.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain'] # ------------------------------------------------------------------------------ # ConvertFiguresPreprocessor configuration # ------------------------------------------------------------------------------ # Converts all of the outputs in a notebook from one format to another. 
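# A minimal subclass sketch (hypothetical class name, and assuming the
# `convert_figure(data_format, data)` hook that SVG2PDFPreprocessor also
# overrides) could look like:
#
#     from IPython.nbconvert.preprocessors import ConvertFiguresPreprocessor
#
#     class PassThroughFigurePreprocessor(ConvertFiguresPreprocessor):
#         """Register a no-op 'conversion' between the configured formats."""
#         def convert_figure(self, data_format, data):
#             return data  # a real converter would transform `data` here
#
# with `from_format`/`to_format` supplied through the config options below.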
# ConvertFiguresPreprocessor will inherit config from: Preprocessor, # NbConvertBase # Format the converter accepts # c.ConvertFiguresPreprocessor.from_format = '' # Format the converter writes # c.ConvertFiguresPreprocessor.to_format = '' # DEPRECATED default highlight language, please use language_info metadata # instead # c.ConvertFiguresPreprocessor.default_language = 'ipython' # # c.ConvertFiguresPreprocessor.enabled = False # An ordered list of preferred output type, the first encountered will usually # be used when converting discarding the others. # c.ConvertFiguresPreprocessor.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain'] # ------------------------------------------------------------------------------ # ExecutePreprocessor configuration # ------------------------------------------------------------------------------ # Executes all the cells in a notebook # ExecutePreprocessor will inherit config from: Preprocessor, NbConvertBase # # c.ExecutePreprocessor.enabled = False # DEPRECATED default highlight language, please use language_info metadata # instead # c.ExecutePreprocessor.default_language = 'ipython' # An ordered list of preferred output type, the first encountered will usually # be used when converting discarding the others. # c.ExecutePreprocessor.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain'] # If execution of a cell times out, interrupt the kernel and continue executing # other cells rather than throwing an error and stopping. # c.ExecutePreprocessor.interrupt_on_timeout = False # The time to wait (in seconds) for output from executions. # c.ExecutePreprocessor.timeout = 30 # ------------------------------------------------------------------------------ # ExtractOutputPreprocessor configuration # ------------------------------------------------------------------------------ # Extracts all of the outputs from the notebook file. The extracted outputs # are returned in the 'resources' dictionary. # ExtractOutputPreprocessor will inherit config from: Preprocessor, # NbConvertBase # # c.ExtractOutputPreprocessor.enabled = False # # c.ExtractOutputPreprocessor.output_filename_template = '{unique_key}_{cell_index}_{index}{extension}' # DEPRECATED default highlight language, please use language_info metadata # instead # c.ExtractOutputPreprocessor.default_language = 'ipython' # # c.ExtractOutputPreprocessor.extract_output_types = {'image/svg+xml', 'image/png', 'application/pdf', 'image/jpeg'} # An ordered list of preferred output type, the first encountered will usually # be used when converting discarding the others. # c.ExtractOutputPreprocessor.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain'] # ------------------------------------------------------------------------------ # HighlightMagicsPreprocessor configuration # ------------------------------------------------------------------------------ # Detects and tags code cells that use a different languages than Python. # HighlightMagicsPreprocessor will inherit config from: Preprocessor, # NbConvertBase # # c.HighlightMagicsPreprocessor.enabled = False # Syntax highlighting for magic's extension languages. Each item associates a # language magic extension such as %%R, with a pygments lexer such as r. 
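# For example (an illustrative mapping, not a default), %%R and %%bash cells
# could be highlighted with the matching pygments lexers via:
# c.HighlightMagicsPreprocessor.languages = {'%%R': 'r', '%%bash': 'bash'}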
# c.HighlightMagicsPreprocessor.languages = {} # DEPRECATED default highlight language, please use language_info metadata # instead # c.HighlightMagicsPreprocessor.default_language = 'ipython' # An ordered list of preferred output type, the first encountered will usually # be used when converting discarding the others. # c.HighlightMagicsPreprocessor.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain'] # ------------------------------------------------------------------------------ # LatexPreprocessor configuration # ------------------------------------------------------------------------------ # Preprocessor for latex destined documents. # # Mainly populates the `latex` key in the resources dict, adding definitions for # pygments highlight styles. # LatexPreprocessor will inherit config from: Preprocessor, NbConvertBase # # c.LatexPreprocessor.enabled = False # DEPRECATED default highlight language, please use language_info metadata # instead # c.LatexPreprocessor.default_language = 'ipython' # An ordered list of preferred output type, the first encountered will usually # be used when converting discarding the others. # c.LatexPreprocessor.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain'] # ------------------------------------------------------------------------------ # Preprocessor configuration # ------------------------------------------------------------------------------ # A configurable preprocessor # # Inherit from this class if you wish to have configurability for your # preprocessor. # # Any configurable traitlets this class exposed will be configurable in profiles # using c.SubClassName.attribute = value # # you can overwrite :meth:`preprocess_cell` to apply a transformation # independently on each cell or :meth:`preprocess` if you prefer your own logic. # See corresponding docstring for information. # # Disabled by default and can be enabled via the config by # 'c.YourPreprocessorName.enabled = True' # Preprocessor will inherit config from: NbConvertBase # # c.Preprocessor.enabled = False # DEPRECATED default highlight language, please use language_info metadata # instead # c.Preprocessor.default_language = 'ipython' # An ordered list of preferred output type, the first encountered will usually # be used when converting discarding the others. # c.Preprocessor.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain'] # ------------------------------------------------------------------------------ # RevealHelpPreprocessor configuration # ------------------------------------------------------------------------------ # RevealHelpPreprocessor will inherit config from: Preprocessor, NbConvertBase # # c.RevealHelpPreprocessor.enabled = False # DEPRECATED default highlight language, please use language_info metadata # instead # c.RevealHelpPreprocessor.default_language = 'ipython' # The URL prefix for reveal.js. This can be a a relative URL for a local copy of # reveal.js, or point to a CDN. # # For speaker notes to work, a local reveal.js prefix must be used. # c.RevealHelpPreprocessor.url_prefix = 'reveal.js' # An ordered list of preferred output type, the first encountered will usually # be used when converting discarding the others. 
# c.RevealHelpPreprocessor.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain'] # ------------------------------------------------------------------------------ # SVG2PDFPreprocessor configuration # ------------------------------------------------------------------------------ # Converts all of the outputs in a notebook from SVG to PDF. # SVG2PDFPreprocessor will inherit config from: ConvertFiguresPreprocessor, # Preprocessor, NbConvertBase # Format the converter writes # c.SVG2PDFPreprocessor.to_format = '' # The path to Inkscape, if necessary # c.SVG2PDFPreprocessor.inkscape = '' # Format the converter accepts # c.SVG2PDFPreprocessor.from_format = '' # The command to use for converting SVG to PDF # # This string is a template, which will be formatted with the keys to_filename # and from_filename. # # The conversion call must read the SVG from {from_flename}, and write a PDF to # {to_filename}. # c.SVG2PDFPreprocessor.command = '' # DEPRECATED default highlight language, please use language_info metadata # instead # c.SVG2PDFPreprocessor.default_language = 'ipython' # # c.SVG2PDFPreprocessor.enabled = False # An ordered list of preferred output type, the first encountered will usually # be used when converting discarding the others. # c.SVG2PDFPreprocessor.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain'] # ------------------------------------------------------------------------------ # FilesWriter configuration # ------------------------------------------------------------------------------ # Consumes nbconvert output and produces files. # FilesWriter will inherit config from: WriterBase, NbConvertBase # DEPRECATED default highlight language, please use language_info metadata # instead # c.FilesWriter.default_language = 'ipython' # When copying files that the notebook depends on, copy them in relation to this # path, such that the destination filename will be os.path.relpath(filename, # relpath). If FilesWriter is operating on a notebook that already exists # elsewhere on disk, then the default will be the directory containing that # notebook. # c.FilesWriter.relpath = '' # Directory to write output to. Leave blank to output to the current directory # c.FilesWriter.build_directory = '' # List of the files that the notebook references. Files will be included with # written output. # c.FilesWriter.files = [] # An ordered list of preferred output type, the first encountered will usually # be used when converting discarding the others. # c.FilesWriter.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain'] # ------------------------------------------------------------------------------ # StdoutWriter configuration # ------------------------------------------------------------------------------ # Consumes output from nbconvert export...() methods and writes to the stdout # stream. # StdoutWriter will inherit config from: WriterBase, NbConvertBase # DEPRECATED default highlight language, please use language_info metadata # instead # c.StdoutWriter.default_language = 'ipython' # List of the files that the notebook references. Files will be included with # written output. # c.StdoutWriter.files = [] # An ordered list of preferred output type, the first encountered will usually # be used when converting discarding the others. 
# c.StdoutWriter.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain'] # ------------------------------------------------------------------------------ # WriterBase configuration # ------------------------------------------------------------------------------ # Consumes output from nbconvert export...() methods and writes to a useful # location. # WriterBase will inherit config from: NbConvertBase # DEPRECATED default highlight language, please use language_info metadata # instead # c.WriterBase.default_language = 'ipython' # List of the files that the notebook references. Files will be included with # written output. # c.WriterBase.files = [] # An ordered list of preferred output type, the first encountered will usually # be used when converting discarding the others. # c.WriterBase.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain'] # ------------------------------------------------------------------------------ # PostProcessorBase configuration # ------------------------------------------------------------------------------ # PostProcessorBase will inherit config from: NbConvertBase # DEPRECATED default highlight language, please use language_info metadata # instead # c.PostProcessorBase.default_language = 'ipython' # An ordered list of preferred output type, the first encountered will usually # be used when converting discarding the others. # c.PostProcessorBase.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain'] # ------------------------------------------------------------------------------ # ServePostProcessor configuration # ------------------------------------------------------------------------------ # Post processor designed to serve files # # Proxies reveal.js requests to a CDN if no local reveal.js is present # ServePostProcessor will inherit config from: PostProcessorBase, NbConvertBase # URL prefix for reveal.js # c.ServePostProcessor.reveal_prefix = 'reveal.js' # Should the browser be opened automatically? # c.ServePostProcessor.open_in_browser = True # An ordered list of preferred output type, the first encountered will usually # be used when converting discarding the others. # c.ServePostProcessor.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain'] # DEPRECATED default highlight language, please use language_info metadata # instead # c.ServePostProcessor.default_language = 'ipython' # port for the server to listen on. # c.ServePostProcessor.port = 8000 # URL for reveal.js CDN. # c.ServePostProcessor.reveal_cdn = 'https://cdn.jsdelivr.net/reveal.js/2.6.2' # The IP address to listen on. # c.ServePostProcessor.ip = '127.0.0.1' jupyter_core-5.7.2/tests/dotipython_empty/profile_default/ipython_notebook_config.py000066400000000000000000000470141457404620400314710ustar00rootroot00000000000000# Configuration file for ipython-notebook. from __future__ import annotations c = get_config() # ------------------------------------------------------------------------------ # NotebookApp configuration # ------------------------------------------------------------------------------ # NotebookApp will inherit config from: BaseIPythonApplication, Application # Supply SSL options for the tornado HTTPServer. See the tornado docs for # details. 
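# For example (an illustrative sketch; the certificate and key paths are
# placeholders for files you would generate yourself):
# c.NotebookApp.ssl_options = {'certfile': 'mycert.pem', 'keyfile': 'mykey.key'}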
# c.NotebookApp.ssl_options = {} # The config manager class to use # c.NotebookApp.config_manager_class = # Hashed password to use for web authentication. # # To generate, type in a python/IPython shell: # # from IPython.lib import passwd; passwd() # # The string should be of the form type:salt:hashed-password. # c.NotebookApp.password = '' # The number of additional ports to try if the specified port is not available. # c.NotebookApp.port_retries = 50 # The kernel manager class to use. # c.NotebookApp.kernel_manager_class = # The port the notebook server will listen on. # c.NotebookApp.port = 8888 # Set the log level by value or name. # c.NotebookApp.log_level = 30 # Path to an extra config file to load. # # If specified, load this config file in addition to any other IPython config. # c.NotebookApp.extra_config_file = '' # The cluster manager class to use. # c.NotebookApp.cluster_manager_class = # The base URL for the notebook server. # # Leading and trailing slashes can be omitted, and will automatically be added. # c.NotebookApp.base_url = '/' # Python modules to load as notebook server extensions. This is an experimental # API, and may change in future releases. # c.NotebookApp.server_extensions = [] # The login handler class to use. # c.NotebookApp.login_handler_class = # The session manager class to use. # c.NotebookApp.session_manager_class = # Set the Access-Control-Allow-Origin header # # Use '*' to allow any origin to access your server. # # Takes precedence over allow_origin_pat. # c.NotebookApp.allow_origin = '' # Whether to enable MathJax for typesetting math/TeX # # MathJax is the javascript library IPython uses to render math/LaTeX. It is # very large, so you may want to disable it if you have a slow internet # connection, or for offline use of the notebook. # # When disabled, equations etc. will appear as their untransformed TeX source. # c.NotebookApp.enable_mathjax = True # The notebook manager class to use. # c.NotebookApp.contents_manager_class = # The full path to an SSL/TLS certificate file. # c.NotebookApp.certfile = '' # Set the Access-Control-Allow-Credentials: true header # c.NotebookApp.allow_credentials = False # The Logging format template # c.NotebookApp.log_format = '[%(name)s]%(highlevel)s %(message)s' # The base URL for websockets, if it differs from the HTTP server (hint: it # almost certainly doesn't). # # Should be in the form of an HTTP origin: ws[s]://hostname[:port] # c.NotebookApp.websocket_url = '' # Use a regular expression for the Access-Control-Allow-Origin header # # Requests from an origin matching the expression will get replies with: # # Access-Control-Allow-Origin: origin # # where `origin` is the origin of the request. # # Ignored if allow_origin is set. # c.NotebookApp.allow_origin_pat = '' # The date format used by logging formatters for %(asctime)s # c.NotebookApp.log_datefmt = '%Y-%m-%d %H:%M:%S' # The logout handler class to use. # c.NotebookApp.logout_handler_class = # The default URL to redirect to from `/` # c.NotebookApp.default_url = '/tree' # The IPython profile to use. # c.NotebookApp.profile = 'default' # extra paths to look for Javascript notebook extensions # c.NotebookApp.extra_nbextensions_path = [] # Specify what command to use to invoke a web browser when opening the notebook. # If not specified, the default browser will be determined by the `webbrowser` # standard library module, which allows setting of the BROWSER environment # variable to override it. # c.NotebookApp.browser = '' # The url for MathJax.js. 
# c.NotebookApp.mathjax_url = '' # Supply overrides for the tornado.web.Application that the IPython notebook # uses. # c.NotebookApp.tornado_settings = {} # The file where the cookie secret is stored. # c.NotebookApp.cookie_secret_file = '' # Create a massive crash report when IPython encounters what may be an internal # error. The default is to append a short message to the usual traceback # c.NotebookApp.verbose_crash = False # Whether to overwrite existing config files when copying # c.NotebookApp.overwrite = False # Whether to open in a browser after starting. The specific browser used is # platform dependent and determined by the python standard library `webbrowser` # module, unless it is overridden using the --browser (NotebookApp.browser) # configuration option. # c.NotebookApp.open_browser = True # DEPRECATED, use tornado_settings # c.NotebookApp.webapp_settings = {} # Reraise exceptions encountered loading server extensions? # c.NotebookApp.reraise_server_extension_failures = False # Whether to install the default config files into the profile dir. If a new # profile is being created, and IPython contains config files for that profile, # then they will be staged into the new directory. Otherwise, default config # files will be automatically generated. # c.NotebookApp.copy_config_files = False # DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib. # c.NotebookApp.pylab = 'disabled' # The directory to use for notebooks and kernels. # c.NotebookApp.notebook_dir = '' # The kernel spec manager class to use. Should be a subclass of # `IPython.kernel.kernelspec.KernelSpecManager`. # # The Api of KernelSpecManager is provisional and might change without warning # between this version of IPython and the next stable one. # c.NotebookApp.kernel_spec_manager_class = # # c.NotebookApp.file_to_run = '' # DEPRECATED use base_url # c.NotebookApp.base_project_url = '/' # The random bytes used to secure cookies. By default this is a new random # number every time you start the Notebook. Set it to a value in a config file # to enable logins to persist across server sessions. # # Note: Cookie secrets should be kept private, do not share config files with # cookie_secret stored in plaintext (you can read the value from a file). # c.NotebookApp.cookie_secret = b'' # The full path to a private key file for usage with SSL/TLS. # c.NotebookApp.keyfile = '' # Extra paths to search for serving static files. # # This allows adding javascript/css to be available from the notebook server # machine, or overriding individual files in the IPython # c.NotebookApp.extra_static_paths = [] # The name of the IPython directory. This directory is used for logging # configuration (through profiles), history storage, etc. The default is usually # $HOME/.ipython. This option can also be specified through the environment # variable IPYTHONDIR. # c.NotebookApp.ipython_dir = '' # Extra paths to search for serving jinja templates. # # Can be used to override templates from IPython.html.templates. # c.NotebookApp.extra_template_paths = [] # Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded- # For headerssent by the upstream reverse proxy. Necessary if the proxy handles # SSL # c.NotebookApp.trust_xheaders = False # Supply extra arguments that will be passed to Jinja environment. # c.NotebookApp.jinja_environment_options = {} # The IP address the notebook server will listen on. 
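# For example, to listen on all network interfaces (best combined with SSL
# and a password, since this exposes the server beyond localhost):
# c.NotebookApp.ip = '0.0.0.0'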
# c.NotebookApp.ip = 'localhost' # ------------------------------------------------------------------------------ # KernelManager configuration # ------------------------------------------------------------------------------ # Manages a single kernel in a subprocess on this host. # # This version starts kernels with Popen. # KernelManager will inherit config from: ConnectionFileMixin # set the heartbeat port [default: random] # c.KernelManager.hb_port = 0 # set the stdin (ROUTER) port [default: random] # c.KernelManager.stdin_port = 0 # # c.KernelManager.transport = 'tcp' # JSON file in which to store connection info [default: kernel-.json] # # This file will contain the IP, ports, and authentication key needed to connect # clients to this kernel. By default, this file will be created in the security # dir of the current profile, but can be specified by absolute path. # c.KernelManager.connection_file = '' # set the control (ROUTER) port [default: random] # c.KernelManager.control_port = 0 # set the shell (ROUTER) port [default: random] # c.KernelManager.shell_port = 0 # Should we autorestart the kernel if it dies. # c.KernelManager.autorestart = False # DEPRECATED: Use kernel_name instead. # # The Popen Command to launch the kernel. Override this if you have a custom # kernel. If kernel_cmd is specified in a configuration file, IPython does not # pass any arguments to the kernel, because it cannot make any assumptions about # the arguments that the kernel understands. In particular, this means that the # kernel does not receive the option --debug if it given on the IPython command # line. # c.KernelManager.kernel_cmd = [] # Set the kernel's IP address [default localhost]. If the IP address is # something other than localhost, then Consoles on other machines will be able # to connect to the Kernel, so be careful! # c.KernelManager.ip = '' # set the iopub (PUB) port [default: random] # c.KernelManager.iopub_port = 0 # ------------------------------------------------------------------------------ # ProfileDir configuration # ------------------------------------------------------------------------------ # An object to manage the profile directory and its resources. # # The profile directory is used by all IPython applications, to manage # configuration, logging and security. # # This object knows how to find, create and manage these directories. This # should be used by any code that wants to handle profiles. # Set the profile location directly. This overrides the logic used by the # `profile` option. # c.ProfileDir.location = '' # ------------------------------------------------------------------------------ # Session configuration # ------------------------------------------------------------------------------ # Object for handling serialization and sending of messages. # # The Session object handles building messages and sending them with ZMQ sockets # or ZMQStream objects. Objects can communicate with each other over the # network via Session objects, and only need to work with the dict-based IPython # message spec. The Session will handle serialization/deserialization, security, # and metadata. # # Sessions support configurable serialization via packer/unpacker traits, and # signing with HMAC digests via the key/keyfile traits. # # Parameters ---------- # # debug : bool # whether to trigger extra debugging statements # packer/unpacker : str : 'json', 'pickle' or import_string # importstrings for methods to serialize message parts. 
If just # 'json' or 'pickle', predefined JSON and pickle packers will be used. # Otherwise, the entire importstring must be used. # # The functions must accept at least valid JSON input, and output *bytes*. # # For example, to use msgpack: # packer = 'msgpack.packb', unpacker='msgpack.unpackb' # pack/unpack : callables # You can also set the pack/unpack callables for serialization directly. # session : bytes # the ID of this Session object. The default is to generate a new UUID. # username : unicode # username added to message headers. The default is to ask the OS. # key : bytes # The key used to initialize an HMAC signature. If unset, messages # will not be signed or checked. # keyfile : filepath # The file containing a key. If this is set, `key` will be initialized # to the contents of the file. # The digest scheme used to construct the message signatures. Must have the form # 'hmac-HASH'. # c.Session.signature_scheme = 'hmac-sha256' # The maximum number of digests to remember. # # The digest history will be culled when it exceeds this value. # c.Session.digest_history_size = 65536 # The name of the unpacker for unserializing messages. Only used with custom # functions for `packer`. # c.Session.unpacker = 'json' # The name of the packer for serializing messages. Should be one of 'json', # 'pickle', or an import name for a custom callable serializer. # c.Session.packer = 'json' # Username for the Session. Default is your system username. # c.Session.username = 'minrk' # Debug output in the Session # c.Session.debug = False # path to file containing execution key. # c.Session.keyfile = '' # The maximum number of items for a container to be introspected for custom # serialization. Containers larger than this are pickled outright. # c.Session.item_threshold = 64 # Threshold (in bytes) beyond which an object's buffer should be extracted to # avoid pickling. # c.Session.buffer_threshold = 1024 # The UUID identifying this session. # c.Session.session = '' # Threshold (in bytes) beyond which a buffer should be sent without copying. # c.Session.copy_threshold = 65536 # execution key, for signing messages. # c.Session.key = b'' # Metadata dictionary, which serves as the default top-level metadata dict for # each message. # c.Session.metadata = {} # ------------------------------------------------------------------------------ # MappingKernelManager configuration # ------------------------------------------------------------------------------ # A KernelManager that handles notebook mapping and HTTP error handling # MappingKernelManager will inherit config from: MultiKernelManager # The kernel manager class. This is configurable to allow subclassing of the # KernelManager for customized behavior. # c.MappingKernelManager.kernel_manager_class = 'IPython.kernel.ioloop.IOLoopKernelManager' # # c.MappingKernelManager.root_dir = '' # The name of the default kernel to start # c.MappingKernelManager.default_kernel_name = 'python3' # ------------------------------------------------------------------------------ # ContentsManager configuration # ------------------------------------------------------------------------------ # Base class for serving files and directories. # # This serves any text or binary file, as well as directories, with special # handling for JSON notebook documents. # # Most APIs take a path argument, which is always an API-style unicode path, and # always refers to a directory. 
# # - unicode, not url-escaped # - '/'-separated # - leading and trailing '/' will be stripped # - if unspecified, path defaults to '', # indicating the root path. # The base name used when creating untitled directories. # c.ContentsManager.untitled_directory = 'Untitled Folder' # Python callable or importstring thereof # # To be called on a contents model prior to save. # # This can be used to process the structure, such as removing notebook outputs # or other side effects that should not be saved. # # It will be called as (all arguments passed by keyword):: # # hook(path=path, model=model, contents_manager=self) # # - model: the model to be saved. Includes file contents. # Modifying this dict will affect the file that is stored. # - path: the API path of the save destination # - contents_manager: this ContentsManager instance # c.ContentsManager.pre_save_hook = None # Glob patterns to hide in file and directory listings. # c.ContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dylib', '*~'] # The base name used when creating untitled files. # c.ContentsManager.untitled_file = 'untitled' # The base name used when creating untitled notebooks. # c.ContentsManager.untitled_notebook = 'Untitled' # # c.ContentsManager.checkpoints = None # # c.ContentsManager.checkpoints_class = # # c.ContentsManager.checkpoints_kwargs = {} # ------------------------------------------------------------------------------ # FileContentsManager configuration # ------------------------------------------------------------------------------ # FileContentsManager will inherit config from: ContentsManager # The base name used when creating untitled directories. # c.FileContentsManager.untitled_directory = 'Untitled Folder' # Python callable or importstring thereof # # To be called on a contents model prior to save. # # This can be used to process the structure, such as removing notebook outputs # or other side effects that should not be saved. # # It will be called as (all arguments passed by keyword):: # # hook(path=path, model=model, contents_manager=self) # # - model: the model to be saved. Includes file contents. # Modifying this dict will affect the file that is stored. # - path: the API path of the save destination # - contents_manager: this ContentsManager instance # c.FileContentsManager.pre_save_hook = None # Glob patterns to hide in file and directory listings. # c.FileContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dylib', '*~'] # The base name used when creating untitled files. # c.FileContentsManager.untitled_file = 'untitled' # The base name used when creating untitled notebooks. # c.FileContentsManager.untitled_notebook = 'Untitled' # Python callable or importstring thereof # # to be called on the path of a file just saved. # # This can be used to process the file on disk, such as converting the notebook # to a script or HTML via nbconvert. 
# # It will be called as (all arguments passed by keyword):: # # hook(os_path=os_path, model=model, contents_manager=instance) # # - path: the filesystem path to the file just written - model: the model # representing the file - contents_manager: this ContentsManager instance # c.FileContentsManager.post_save_hook = None # DEPRECATED, use post_save_hook # c.FileContentsManager.save_script = False # # c.FileContentsManager.root_dir = '' # # c.FileContentsManager.checkpoints_class = # # c.FileContentsManager.checkpoints = None # # c.FileContentsManager.checkpoints_kwargs = {} # ------------------------------------------------------------------------------ # NotebookNotary configuration # ------------------------------------------------------------------------------ # A class for computing and verifying notebook signatures. # The number of notebook signatures to cache. When the number of signatures # exceeds this value, the oldest 25% of signatures will be culled. # c.NotebookNotary.cache_size = 65535 # The sqlite file in which to store notebook signatures. By default, this will # be in your IPython profile. You can set it to ':memory:' to disable sqlite # writing to the filesystem. # c.NotebookNotary.db_file = '' # The secret key with which notebooks are signed. # c.NotebookNotary.secret = b'' # The file where the secret key is stored. # c.NotebookNotary.secret_file = '' # The hashing algorithm used to sign notebooks. # c.NotebookNotary.algorithm = 'sha256' # ------------------------------------------------------------------------------ # KernelSpecManager configuration # ------------------------------------------------------------------------------ # Whitelist of allowed kernel names. # # By default, all installed kernels are allowed. # c.KernelSpecManager.whitelist = set() jupyter_core-5.7.2/tests/dotipython_empty/profile_default/static/000077500000000000000000000000001457404620400254615ustar00rootroot00000000000000jupyter_core-5.7.2/tests/dotipython_empty/profile_default/static/custom/000077500000000000000000000000001457404620400267735ustar00rootroot00000000000000jupyter_core-5.7.2/tests/dotipython_empty/profile_default/static/custom/custom.css000066400000000000000000000002211457404620400310120ustar00rootroot00000000000000/* Placeholder for custom user CSS mainly to be overridden in profile/static/custom/custom.css This will always be an empty file in IPython */ jupyter_core-5.7.2/tests/dotipython_empty/profile_default/static/custom/custom.js000066400000000000000000000053631457404620400306520ustar00rootroot00000000000000// leave at least 2 line with only a star on it below, or doc generation fails /** * * * Placeholder for custom user javascript * mainly to be overridden in profile/static/custom/custom.js * This will always be an empty file in IPython * * User could add any javascript in the `profile/static/custom/custom.js` file. * It will be executed by the ipython notebook at load time. * * Same thing with `profile/static/custom/custom.css` to inject custom css into the notebook. * * * The object available at load time depend on the version of IPython in use. * there is no guaranties of API stability. * * The example below explain the principle, and might not be valid. * * Instances are created after the loading of this file and might need to be accessed using events: * define([ * 'base/js/namespace', * 'base/js/events' * ], function(IPython, events) { * events.on("app_initialized.NotebookApp", function () { * IPython.keyboard_manager.... 
* }); * }); * * __Example 1:__ * * Create a custom button in toolbar that execute `%qtconsole` in kernel * and hence open a qtconsole attached to the same kernel as the current notebook * * define([ * 'base/js/namespace', * 'base/js/events' * ], function(IPython, events) { * events.on('app_initialized.NotebookApp', function(){ * IPython.toolbar.add_buttons_group([ * { * 'label' : 'run qtconsole', * 'icon' : 'icon-terminal', // select your icon from http://fortawesome.github.io/Font-Awesome/icons * 'callback': function () { * IPython.notebook.kernel.execute('%qtconsole') * } * } * // add more button here if needed. * ]); * }); * }); * * __Example 2:__ * * At the completion of the dashboard loading, load an unofficial javascript extension * that is installed in profile/static/custom/ * * define([ * 'base/js/events' * ], function(events) { * events.on('app_initialized.DashboardApp', function(){ * require(['custom/unofficial_extension.js']) * }); * }); * * __Example 3:__ * * Use `jQuery.getScript(url [, success(script, textStatus, jqXHR)] );` * to load custom script into the notebook. * * // to load the metadata ui extension example. * $.getScript('/static/notebook/js/celltoolbarpresets/example.js'); * // or * // to load the metadata ui extension to control slideshow mode / reveal js for nbconvert * $.getScript('/static/notebook/js/celltoolbarpresets/slideshow.js'); * * * @module IPython * @namespace IPython * @class customjs * @static */ jupyter_core-5.7.2/tests/mocking.py000066400000000000000000000012641457404620400174130ustar00rootroot00000000000000"""General mocking utilities""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. from __future__ import annotations import os import sys from unittest.mock import patch class MultiPatch: def __init__(self, *patchers): self.patchers = patchers def __enter__(self): for p in self.patchers: p.start() def __exit__(self, *args): for p in self.patchers: p.stop() darwin = MultiPatch( patch.object(os, "name", "posix"), patch.object(sys, "platform", "darwin"), ) linux = MultiPatch( patch.object(os, "name", "posix"), patch.object(sys, "platform", "linux2"), ) jupyter_core-5.7.2/tests/test_application.py000066400000000000000000000105261457404620400213270ustar00rootroot00000000000000from __future__ import annotations import asyncio import os import shutil from tempfile import mkdtemp from unittest.mock import patch import pytest from traitlets import Integer from jupyter_core.application import JupyterApp, JupyterAsyncApp, NoStart from jupyter_core.utils import ensure_event_loop pjoin = os.path.join def test_basic(): JupyterApp() def test_default_traits(): app = JupyterApp() for trait_name in app.traits(): getattr(app, trait_name) class DummyApp(JupyterApp): name = "dummy-app" m = Integer(0, config=True) n = Integer(0, config=True) _dummy_config = """ c.DummyApp.n = 10 """ def test_custom_config(): app = DummyApp() td = mkdtemp() fname = pjoin(td, "config.py") with open(fname, "w", encoding="utf-8") as f: f.write(_dummy_config) app.initialize(["--config", fname]) shutil.rmtree(td) assert app.config_file == fname assert app.n == 10 def test_cli_override(): app = DummyApp() td = mkdtemp() fname = pjoin(td, "config.py") with open(fname, "w", encoding="utf-8") as f: f.write(_dummy_config) app.initialize(["--config", fname, "--DummyApp.n=20"]) shutil.rmtree(td) assert app.n == 20 def test_generate_config(): td = mkdtemp() app = DummyApp(config_dir=td) app.initialize(["--generate-config"]) assert 
app.generate_config with pytest.raises(NoStart): app.start() assert os.path.exists(os.path.join(td, "dummy_app_config.py")) def test_load_config(): config_dir = mkdtemp() os.environ["JUPYTER_CONFIG_PATH"] = str(config_dir) with open(pjoin(config_dir, "dummy_app_config.py"), "w", encoding="utf-8") as f: f.write("c.DummyApp.m = 1\n") f.write("c.DummyApp.n = 1") app = DummyApp(config_dir=config_dir) app.initialize([]) assert app.n == 1, "Loaded config from config dir" assert app.m == 1, "Loaded config from config dir" shutil.rmtree(config_dir) del os.environ["JUPYTER_CONFIG_PATH"] def test_load_config_no_cwd(): config_dir = mkdtemp() wd = mkdtemp() with open(pjoin(wd, "dummy_app_config.py"), "w", encoding="utf-8") as f: f.write("c.DummyApp.m = 1\n") f.write("c.DummyApp.n = 1") with patch.object(os, "getcwd", lambda: wd): app = DummyApp(config_dir=config_dir) app.initialize([]) assert app.n == 0 assert app.m == 0 shutil.rmtree(config_dir) shutil.rmtree(wd) def test_load_bad_config(): config_dir = mkdtemp() os.environ["JUPYTER_CONFIG_PATH"] = str(config_dir) with open(pjoin(config_dir, "dummy_app_config.py"), "w", encoding="utf-8") as f: f.write('c.DummyApp.m = "a\n') # Syntax error with pytest.raises(SyntaxError): # noqa: PT012 app = DummyApp(config_dir=config_dir) app.raise_config_file_errors = True app.initialize([]) shutil.rmtree(config_dir) del os.environ["JUPYTER_CONFIG_PATH"] def test_runtime_dir_changed(): app = DummyApp() td = mkdtemp() shutil.rmtree(td) app.runtime_dir = td assert os.path.isdir(td) shutil.rmtree(td) class AsyncioRunApp(JupyterApp): async def _inner(self): pass def start(self): asyncio.run(self._inner()) def test_asyncio_run(): AsyncioRunApp.launch_instance([]) AsyncioRunApp.clear_instance() class SyncTornadoApp(JupyterApp): async def _inner(self): self.running_loop = asyncio.get_running_loop() def start(self): self.starting_loop = ensure_event_loop() loop = asyncio.get_event_loop() loop.run_until_complete(self._inner()) loop.close() def test_sync_tornado_run(): SyncTornadoApp.launch_instance([]) app = SyncTornadoApp.instance() assert app.running_loop == app.starting_loop SyncTornadoApp.clear_instance() class AsyncApp(JupyterAsyncApp): async def initialize_async(self, argv): self.value = 10 async def start_async(self): assert self.value == 10 def test_async_app(): AsyncApp.launch_instance([]) app = AsyncApp.instance() assert app.value == 10 AsyncApp.clear_instance() class AsyncTornadoApp(AsyncApp): _prefer_selector_loop = True def test_async_tornado_app(): AsyncTornadoApp.launch_instance([]) app = AsyncApp.instance() assert app._prefer_selector_loop is True AsyncTornadoApp.clear_instance() jupyter_core-5.7.2/tests/test_command.py000066400000000000000000000162171457404620400204450ustar00rootroot00000000000000"""Test the Jupyter command-line""" from __future__ import annotations import json import os import sys import sysconfig from subprocess import PIPE, CalledProcessError, check_output from unittest.mock import patch import pytest from jupyter_core import __version__ from jupyter_core.command import list_subcommands from jupyter_core.paths import ( jupyter_config_dir, jupyter_config_path, jupyter_data_dir, jupyter_path, jupyter_runtime_dir, ) resetenv = patch.dict(os.environ) def setup_function(): resetenv.start() for var in [ "JUPYTER_CONFIG_DIR", "JUPYTER_CONFIG_PATH", "JUPYTER_DATA_DIR", "JUPYTER_NO_CONFIG", "JUPYTER_PATH", "JUPYTER_PLATFORM_DIRS", "JUPYTER_RUNTIME_DIR", ]: os.environ.pop(var, None) def teardown_function(): resetenv.stop() def 
get_jupyter_output(cmd): """Get output of a jupyter command""" if not isinstance(cmd, list): cmd = [cmd] return ( check_output([sys.executable, "-m", "jupyter_core", *cmd], stderr=PIPE) .decode("utf8") .strip() ) def write_executable(path, source): if sys.platform == "win32": script = path.dirpath() / path.purebasename + "-script.py" exe = path.dirpath() / path.purebasename + ".exe" else: script = path script.write(source) script.chmod(0o700) if sys.platform == "win32": try: import importlib.resources if not hasattr(importlib.resources, "files"): raise ImportError wp = importlib.resources.files("setuptools").joinpath("cli-32.exe") w = wp.read_bytes() except (ImportError, FileNotFoundError, SystemError): pytest.skip( "Need importlib.resources and setuptools to make scripts executable on Windows" ) exe.write(w, "wb") exe.chmod(0o700) def assert_output(cmd, expected): assert get_jupyter_output(cmd) == expected def test_config_dir(): assert_output("--config-dir", jupyter_config_dir()) def test_data_dir(): assert_output("--data-dir", jupyter_data_dir()) def test_runtime_dir(): assert_output("--runtime-dir", jupyter_runtime_dir()) def test_paths(): output = get_jupyter_output("--paths") for d in (jupyter_config_dir(), jupyter_data_dir(), jupyter_runtime_dir()): assert d in output for key in ("config", "data", "runtime"): assert ("%s:" % key) in output for path in (jupyter_config_path(), jupyter_path()): for d in path: assert d in output def test_paths_json(): output = get_jupyter_output(["--paths", "--json"]) data = json.loads(output) assert sorted(data) == ["config", "data", "runtime"] for _, path in data.items(): assert isinstance(path, list) def test_paths_debug(): names = [ "JUPYTER_PREFER_ENV_PATH", "JUPYTER_NO_CONFIG", "JUPYTER_CONFIG_PATH", "JUPYTER_CONFIG_DIR", "JUPYTER_PATH", "JUPYTER_DATA_DIR", "JUPYTER_RUNTIME_DIR", ] output = get_jupyter_output(["--paths", "--debug"]) for v in names: assert f"{v} is not set" in output with patch.dict("os.environ", [(v, "y") for v in names]): output = get_jupyter_output(["--paths", "--debug"]) for v in names: assert f"{v} is set" in output def test_subcommand_not_given(): with pytest.raises(CalledProcessError): get_jupyter_output([]) def test_help(): output = get_jupyter_output("-h") assert "--help" in output def test_subcommand_not_found(): with pytest.raises(CalledProcessError) as excinfo: get_jupyter_output("nonexistant-subcommand") stderr = excinfo.value.stderr.decode("utf8") assert "Jupyter command `jupyter-nonexistant-subcommand` not found." 
in stderr @patch.object(sys, "argv", [__file__] + sys.argv[1:]) def test_subcommand_list(tmpdir): a = tmpdir.mkdir("a") for cmd in ("jupyter-foo-bar", "jupyter-xyz", "jupyter-babel-fish"): a.join(cmd).write("") b = tmpdir.mkdir("b") for cmd in ("jupyter-foo", "jupyterstuff", "jupyter-yo-eyropa-ganymyde-callysto"): b.join(cmd).write("") c = tmpdir.mkdir("c") for cmd in ("jupyter-baz", "jupyter-bop"): c.join(cmd).write("") path = os.pathsep.join(map(str, [a, b])) def get_path(dummy): return str(c) with patch.object(sysconfig, "get_path", get_path), patch.dict("os.environ", {"PATH": path}): subcommands = list_subcommands() assert subcommands == [ "babel-fish", "baz", "bop", "foo", "xyz", "yo-eyropa-ganymyde-callysto", ] skip_darwin = pytest.mark.skipif(sys.platform == "darwin", reason="Fails on macos") @skip_darwin def test_not_on_path(tmpdir): a = tmpdir.mkdir("a") jupyter = a.join("jupyter") jupyter.write("from jupyter_core import command; command.main()") jupyter.chmod(0o700) witness = a.join("jupyter-witness") witness_src = "#!{}\n{}\n".format(sys.executable, 'print("WITNESS ME")') write_executable(witness, witness_src) env = {"PATH": ""} if "SYSTEMROOT" in os.environ: # Windows http://bugs.python.org/issue20614 env["SYSTEMROOT"] = os.environ["SYSTEMROOT"] if sys.platform == "win32": env["PATHEXT"] = ".EXE" # This won't work on windows unless out = check_output([sys.executable, str(jupyter), "witness"], env=env) assert b"WITNESS" in out @skip_darwin def test_path_priority(tmpdir): a = tmpdir.mkdir("a") jupyter = a.join("jupyter") jupyter.write("from jupyter_core import command; command.main()") jupyter.chmod(0o700) witness_a = a.join("jupyter-witness") witness_a_src = "#!{}\n{}\n".format(sys.executable, 'print("WITNESS A")') write_executable(witness_a, witness_a_src) b = tmpdir.mkdir("b") witness_b = b.join("jupyter-witness") witness_b_src = "#!{}\n{}\n".format(sys.executable, 'print("WITNESS B")') write_executable(witness_b, witness_b_src) env = {"PATH": str(b)} if "SYSTEMROOT" in os.environ: # Windows http://bugs.python.org/issue20614 env["SYSTEMROOT"] = os.environ["SYSTEMROOT"] if sys.platform == "win32": env["PATHEXT"] = ".EXE" out = check_output([sys.executable, str(jupyter), "witness"], env=env) assert b"WITNESS A" in out @skip_darwin def test_argv0(tmpdir): a = tmpdir.mkdir("a") jupyter = a.join("jupyter") jupyter.write("from jupyter_core import command; command.main()") jupyter.chmod(0o700) witness_a = a.join("jupyter-witness") witness_a_src = f"""#!{sys.executable} import sys print(sys.argv[0]) """ write_executable(witness_a, witness_a_src) env = {} if "SYSTEMROOT" in os.environ: # Windows http://bugs.python.org/issue20614 env["SYSTEMROOT"] = os.environ["SYSTEMROOT"] if sys.platform == "win32": env["PATHEXT"] = ".EXE" out = check_output([sys.executable, str(jupyter), "witness"], env=env) # Make sure the first argv is the full path to the executing script assert f"{jupyter}-witness".encode() in out def test_version(): assert isinstance(__version__, str) jupyter_core-5.7.2/tests/test_migrate.py000066400000000000000000000150271457404620400204550ustar00rootroot00000000000000# Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
"""Test config file migration""" from __future__ import annotations import os import re import shutil from tempfile import mkdtemp from unittest.mock import patch import pytest from jupyter_core import migrate as migrate_mod from jupyter_core.application import JupyterApp from jupyter_core.migrate import ( migrate, migrate_config, migrate_dir, migrate_file, migrate_one, migrate_static_custom, ) from jupyter_core.utils import ensure_dir_exists pjoin = os.path.join here = os.path.dirname(__file__) dotipython = pjoin(here, "dotipython") dotipython_empty = pjoin(here, "dotipython_empty") @pytest.fixture() def td(request): """Fixture for a temporary directory""" td = mkdtemp("μnïcø∂e") yield td shutil.rmtree(td) @pytest.fixture() def env(request): """Fixture for a full testing environment""" td = mkdtemp() env = { "TESTDIR": td, "IPYTHONDIR": pjoin(td, "ipython"), "JUPYTER_CONFIG_DIR": pjoin(td, "jupyter"), "JUPYTER_DATA_DIR": pjoin(td, "jupyter_data"), "JUPYTER_RUNTIME_DIR": pjoin(td, "jupyter_runtime"), "JUPYTER_PATH": "", } env_patch = patch.dict(os.environ, env) env_patch.start() yield env env_patch.stop() shutil.rmtree(td, ignore_errors=os.name == "nt") def touch(path, content=""): ensure_dir_exists(os.path.dirname(path)) with open(path, "w", encoding="utf-8") as f: f.write(content) def assert_files_equal(a, b): """Verify that two files match""" assert os.path.exists(b) with open(a, encoding="utf-8") as f: a_txt = f.read() with open(b, encoding="utf-8") as f: b_txt = f.read() assert a_txt == b_txt def test_migrate_file(td): src = pjoin(td, "src") dst = pjoin(td, "dst") touch(src, "test file") assert migrate_file(src, dst) assert_files_equal(src, dst) src2 = pjoin(td, "src2") touch(src2, "different src") assert not migrate_file(src2, dst) assert_files_equal(src, dst) def test_migrate_dir(td): src = pjoin(td, "src") dst = pjoin(td, "dst") os.mkdir(src) assert not migrate_dir(src, dst) assert not os.path.exists(dst) touch(pjoin(src, "f"), "test file") assert migrate_dir(src, dst) assert_files_equal(pjoin(src, "f"), pjoin(dst, "f")) touch(pjoin(src, "g"), "other test file") assert not migrate_dir(src, dst) assert not os.path.exists(pjoin(dst, "g")) shutil.rmtree(dst) os.mkdir(dst) assert migrate_dir(src, dst) assert_files_equal(pjoin(src, "f"), pjoin(dst, "f")) assert_files_equal(pjoin(src, "g"), pjoin(dst, "g")) def test_migrate_one(td): src = pjoin(td, "src") srcdir = pjoin(td, "srcdir") dst = pjoin(td, "dst") dstdir = pjoin(td, "dstdir") touch(src, "test file") touch(pjoin(srcdir, "f"), "test dir file") called = {} def notice_m_file(src, dst): called["migrate_file"] = True return migrate_file(src, dst) def notice_m_dir(src, dst): called["migrate_dir"] = True return migrate_dir(src, dst) with patch.object(migrate_mod, "migrate_file", notice_m_file), patch.object( migrate_mod, "migrate_dir", notice_m_dir ): assert migrate_one(src, dst) assert called == {"migrate_file": True} called.clear() assert migrate_one(srcdir, dstdir) assert called == {"migrate_dir": True} called.clear() assert not migrate_one(pjoin(td, "does_not_exist"), dst) assert called == {} def test_migrate_config(td): profile = pjoin(td, "profile") jpy = pjoin(td, "jupyter_config") ensure_dir_exists(profile) env = { "profile": profile, "jupyter_config": jpy, } cfg_py = pjoin(profile, "ipython_test_config.py") touch(cfg_py, "c.Klass.trait = 5\n") empty_cfg_py = pjoin(profile, "ipython_empty_config.py") touch(empty_cfg_py, "# c.Klass.trait = 5\n") assert not migrate_config("empty", env) assert not os.path.exists(jpy) with 
patch.dict( migrate_mod.config_substitutions, { re.compile(r"\bKlass\b"): "Replaced", }, ): assert migrate_config("test", env) assert os.path.isdir(jpy) assert sorted(os.listdir(jpy)) == [ "jupyter_test_config.py", ] with open(pjoin(jpy, "jupyter_test_config.py"), encoding="utf-8") as f: text = f.read() assert text == "c.Replaced.trait = 5\n" def test_migrate_custom_default(td): profile = pjoin(dotipython, "profile_default") src = pjoin(profile, "static", "custom") assert os.path.exists(src) assert not migrate_static_custom(src, td) src = pjoin(td, "src") dst = pjoin(td, "dst") os.mkdir(src) src_custom_js = pjoin(src, "custom.js") src_custom_css = pjoin(src, "custom.css") touch(src_custom_js, "var a=5;") touch(src_custom_css, "div { height: 5px; }") assert migrate_static_custom(src, dst) def test_migrate_nothing(env): migrate() assert os.listdir(env["JUPYTER_CONFIG_DIR"]) == ["migrated"] assert not os.path.exists(env["JUPYTER_DATA_DIR"]) def test_migrate_default(env): shutil.copytree(dotipython_empty, env["IPYTHONDIR"]) migrate() assert os.listdir(env["JUPYTER_CONFIG_DIR"]) == ["migrated"] assert not os.path.exists(env["JUPYTER_DATA_DIR"]) def test_migrate(env): shutil.copytree(dotipython, env["IPYTHONDIR"]) migrate() assert os.path.exists(env["JUPYTER_CONFIG_DIR"]) assert os.path.exists(env["JUPYTER_DATA_DIR"]) def test_app_migrate(env): shutil.copytree(dotipython, env["IPYTHONDIR"]) app = JupyterApp() app.initialize([]) assert os.path.exists(env["JUPYTER_CONFIG_DIR"]) assert os.path.exists(env["JUPYTER_DATA_DIR"]) def test_app_migrate_skip_if_marker(env): shutil.copytree(dotipython, env["IPYTHONDIR"]) touch(pjoin(env["JUPYTER_CONFIG_DIR"], "migrated"), "done") app = JupyterApp() app.initialize([]) assert os.listdir(env["JUPYTER_CONFIG_DIR"]) == ["migrated"] assert not os.path.exists(env["JUPYTER_DATA_DIR"]) def test_app_migrate_skip_unwritable_marker(env): shutil.copytree(dotipython, env["IPYTHONDIR"]) migrated_marker = pjoin(env["JUPYTER_CONFIG_DIR"], "migrated") touch(migrated_marker, "done") os.chmod(migrated_marker, 0) # make it unworkable app = JupyterApp() app.initialize([]) assert os.listdir(env["JUPYTER_CONFIG_DIR"]) == ["migrated"] assert not os.path.exists(env["JUPYTER_DATA_DIR"]) jupyter_core-5.7.2/tests/test_paths.py000066400000000000000000000435231457404620400201460ustar00rootroot00000000000000"""Tests for paths""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
from __future__ import annotations import os import re import shutil import site import stat import subprocess import sys import tempfile import warnings from unittest.mock import patch import pytest from platformdirs import __version_info__ from jupyter_core import paths from jupyter_core.paths import ( UF_HIDDEN, _win32_restrict_file_to_user_ctypes, exists, is_file_hidden, is_hidden, issue_insecure_write_warning, jupyter_config_dir, jupyter_config_path, jupyter_data_dir, jupyter_path, jupyter_runtime_dir, prefer_environment_over_user, secure_write, ) pjoin = os.path.join macos = pytest.mark.skipif(sys.platform != "darwin", reason="only run on macos") windows = pytest.mark.skipif(sys.platform != "win32", reason="only run on windows") linux = pytest.mark.skipif(sys.platform != "linux", reason="only run on linux") xdg_env = { "XDG_CONFIG_HOME": "/tmp/xdg/config", "XDG_DATA_HOME": "/tmp/xdg/data", "XDG_RUNTIME_DIR": "/tmp/xdg/runtime", } xdg = patch.dict("os.environ", xdg_env) no_xdg = patch.dict( "os.environ", {}, ) environment = patch.dict("os.environ") use_platformdirs = patch.dict("os.environ", {"JUPYTER_PLATFORM_DIRS": "1"}) jupyter_config_env = "/jupyter-cfg" config_env = patch.dict("os.environ", {"JUPYTER_CONFIG_DIR": jupyter_config_env}) prefer_env = patch.dict("os.environ", {"JUPYTER_PREFER_ENV_PATH": "True"}) prefer_user = patch.dict("os.environ", {"JUPYTER_PREFER_ENV_PATH": "False"}) def setup_function(): environment.start() for var in [ "JUPYTER_CONFIG_DIR", "JUPYTER_CONFIG_PATH", "JUPYTER_DATA_DIR", "JUPYTER_NO_CONFIG", "JUPYTER_PATH", "JUPYTER_PLATFORM_DIRS", "JUPYTER_RUNTIME_DIR", ]: os.environ.pop(var, None) # For these tests, default to preferring the user-level over environment-level paths # Tests can override this preference using the prefer_env decorator/context manager os.environ["JUPYTER_PREFER_ENV_PATH"] = "no" def teardown_function(): environment.stop() def realpath(path): return os.path.abspath(os.path.realpath(os.path.expanduser(path))) home_jupyter = realpath("~/.jupyter") def test_envset(): true_values = ["", "True", "on", "yes", "Y", "1", "anything"] false_values = ["n", "No", "N", "fAlSE", "0", "0.0", "Off"] with patch.dict("os.environ", ((f"FOO_{v}", v) for v in true_values + false_values)): for v in true_values: assert paths.envset(f"FOO_{v}") for v in false_values: assert not paths.envset(f"FOO_{v}") # Test default value is False assert paths.envset("THIS_VARIABLE_SHOULD_NOT_BE_SET") is False # Test envset returns the given default if supplied assert paths.envset("THIS_VARIABLE_SHOULD_NOT_BE_SET", None) is None def test_config_dir(): config = jupyter_config_dir() assert config == home_jupyter @macos @use_platformdirs def test_config_dir_darwin(): config = jupyter_config_dir() assert config == realpath("~/Library/Application Support/Jupyter") @windows @use_platformdirs def test_config_dir_windows(): config = jupyter_config_dir() assert config == realpath(pjoin(os.environ.get("LOCALAPPDATA", ""), "Jupyter")) @linux @use_platformdirs def test_config_dir_linux(): config = jupyter_config_dir() assert config == realpath("~/.config/jupyter") def test_config_env_legacy(): with config_env: config = jupyter_config_dir() assert config == jupyter_config_env @use_platformdirs def test_config_env(): with config_env: config = jupyter_config_dir() assert config == jupyter_config_env def test_data_dir_env_legacy(): data_env = "runtime-dir" with patch.dict("os.environ", {"JUPYTER_DATA_DIR": data_env}): data = jupyter_data_dir() assert data == data_env @use_platformdirs def 
test_data_dir_env(): data_env = "runtime-dir" with patch.dict("os.environ", {"JUPYTER_DATA_DIR": data_env}): data = jupyter_data_dir() assert data == data_env @macos def test_data_dir_darwin_legacy(): data = jupyter_data_dir() assert data == realpath("~/Library/Jupyter") @macos @use_platformdirs def test_data_dir_darwin(): data = jupyter_data_dir() assert data == realpath("~/Library/Application Support/Jupyter") @windows def test_data_dir_windows_legacy(): data = jupyter_data_dir() assert data == realpath(pjoin(os.environ.get("APPDATA", ""), "jupyter")) @windows @use_platformdirs def test_data_dir_windows(): data = jupyter_data_dir() assert data == realpath(pjoin(os.environ.get("LOCALAPPDATA", ""), "Jupyter")) @linux def test_data_dir_linux_legacy(): with no_xdg: data = jupyter_data_dir() assert data == realpath("~/.local/share/jupyter") with xdg: data = jupyter_data_dir() assert data == pjoin(xdg_env["XDG_DATA_HOME"], "jupyter") @linux @use_platformdirs def test_data_dir_linux(): with no_xdg: data = jupyter_data_dir() assert data == realpath("~/.local/share/jupyter") with xdg: data = jupyter_data_dir() assert data == pjoin(xdg_env["XDG_DATA_HOME"], "jupyter") def test_runtime_dir_env_legacy(): rtd_env = "runtime-dir" with patch.dict("os.environ", {"JUPYTER_RUNTIME_DIR": rtd_env}): runtime = jupyter_runtime_dir() assert runtime == rtd_env @use_platformdirs def test_runtime_dir_env(): rtd_env = "runtime-dir" with patch.dict("os.environ", {"JUPYTER_RUNTIME_DIR": rtd_env}): runtime = jupyter_runtime_dir() assert runtime == rtd_env @macos def test_runtime_dir_darwin_legacy(): runtime = jupyter_runtime_dir() assert runtime == realpath("~/Library/Jupyter/runtime") @macos @use_platformdirs def test_runtime_dir_darwin(): runtime = jupyter_runtime_dir() if __version_info__[0] < 3: assert runtime == realpath("~/Library/Preferences/Jupyter/runtime") return assert runtime == realpath("~/Library/Application Support/Jupyter/runtime") @windows def test_runtime_dir_windows_legacy(): runtime = jupyter_runtime_dir() assert runtime == realpath(pjoin(os.environ.get("APPDATA", ""), "jupyter", "runtime")) @windows @use_platformdirs def test_runtime_dir_windows(): runtime = jupyter_runtime_dir() assert runtime == realpath(pjoin(os.environ.get("LOCALAPPDATA", ""), "Jupyter", "runtime")) @linux def test_runtime_dir_linux_legacy(): with no_xdg: runtime = jupyter_runtime_dir() assert runtime == realpath("~/.local/share/jupyter/runtime") with xdg: runtime = jupyter_runtime_dir() assert runtime == pjoin(xdg_env["XDG_DATA_HOME"], "jupyter", "runtime") @linux @use_platformdirs def test_runtime_dir_linux(): with no_xdg: runtime = jupyter_runtime_dir() assert runtime == realpath("~/.local/share/jupyter/runtime") with xdg: runtime = jupyter_runtime_dir() assert runtime == pjoin(xdg_env["XDG_DATA_HOME"], "jupyter", "runtime") def test_jupyter_path(): system_path = ["system", "path"] with patch.object(paths, "SYSTEM_JUPYTER_PATH", system_path): path = jupyter_path() assert path[0] == jupyter_data_dir() assert path[-2:] == system_path def test_jupyter_path_user_site(): with patch.object(site, "ENABLE_USER_SITE", True): path = jupyter_path() # deduplicated expected values values = list( dict.fromkeys( [ jupyter_data_dir(), os.path.join(site.getuserbase(), "share", "jupyter"), paths.ENV_JUPYTER_PATH[0], ] ) ) for p, v in zip(path, values): assert p == v def test_jupyter_path_no_user_site(): with patch.object(site, "ENABLE_USER_SITE", False): path = jupyter_path() assert path[0] == jupyter_data_dir() assert path[1] == 
paths.ENV_JUPYTER_PATH[0] def test_jupyter_path_prefer_env(): with prefer_env: path = jupyter_path() assert path[0] == paths.ENV_JUPYTER_PATH[0] assert path[1] == jupyter_data_dir() def test_jupyter_path_env(): path_env = os.pathsep.join( [ pjoin("foo", "bar"), pjoin("bar", "baz", ""), # trailing / ] ) with patch.dict("os.environ", {"JUPYTER_PATH": path_env}): path = jupyter_path() assert path[:2] == [pjoin("foo", "bar"), pjoin("bar", "baz")] def test_jupyter_path_sys_prefix(): with patch.object(paths, "ENV_JUPYTER_PATH", ["sys_prefix"]): path = jupyter_path() assert "sys_prefix" in path def test_jupyter_path_subdir(): path = jupyter_path("sub1", "sub2") for p in path: assert p.endswith(pjoin("", "sub1", "sub2")) def test_jupyter_config_path(): with patch.object(site, "ENABLE_USER_SITE", True): path = jupyter_config_path() # deduplicated expected values values = list( dict.fromkeys( [ jupyter_config_dir(), os.path.join(site.getuserbase(), "etc", "jupyter"), paths.ENV_CONFIG_PATH[0], ] ) ) for p, v in zip(path, values): assert p == v def test_jupyter_config_path_no_user_site(): with patch.object(site, "ENABLE_USER_SITE", False): path = jupyter_config_path() assert path[0] == jupyter_config_dir() assert path[1] == paths.ENV_CONFIG_PATH[0] def test_jupyter_config_path_prefer_env(): with prefer_env, patch.object(site, "ENABLE_USER_SITE", True): path = jupyter_config_path() # deduplicated expected values values = list( dict.fromkeys( [ paths.ENV_CONFIG_PATH[0], jupyter_config_dir(), os.path.join(site.getuserbase(), "etc", "jupyter"), ] ) ) for p, v in zip(path, values): assert p == v def test_jupyter_config_path_env(): path_env = os.pathsep.join( [ pjoin("foo", "bar"), pjoin("bar", "baz", ""), # trailing / ] ) with patch.dict("os.environ", {"JUPYTER_CONFIG_PATH": path_env}): path = jupyter_config_path() assert path[:2] == [pjoin("foo", "bar"), pjoin("bar", "baz")] def test_prefer_environment_over_user(): with prefer_env: assert prefer_environment_over_user() is True with prefer_user: assert prefer_environment_over_user() is False # Test default if environment variable is not set, and try to determine if we are in a virtual environment os.environ.pop("JUPYTER_PREFER_ENV_PATH", None) # base prefix differs, venv with patch.object(sys, "base_prefix", "notthesame"): assert prefer_environment_over_user() == paths._do_i_own(sys.prefix) # conda with patch.object(sys, "base_prefix", sys.prefix): # in base env, don't prefer it with patch.dict(os.environ, {"CONDA_PREFIX": sys.prefix, "CONDA_DEFAULT_ENV": "base"}): assert not prefer_environment_over_user() # in non-base env, prefer it with patch.dict(os.environ, {"CONDA_PREFIX": sys.prefix, "CONDA_DEFAULT_ENV": "/tmp"}): assert prefer_environment_over_user() == paths._do_i_own(sys.prefix) # conda env defined, but we aren't using it with patch.dict( os.environ, {"CONDA_PREFIX": "/somewherelese", "CONDA_DEFAULT_ENV": "/tmp"} ): assert not prefer_environment_over_user() def test_is_hidden(): with tempfile.TemporaryDirectory() as root: subdir1 = os.path.join(root, "subdir") os.makedirs(subdir1) assert not is_hidden(subdir1, root) assert not is_file_hidden(subdir1) subdir2 = os.path.join(root, ".subdir2") os.makedirs(subdir2) assert is_hidden(subdir2, root) assert is_file_hidden(subdir2) # root dir is always visible assert not is_hidden(subdir2, subdir2) subdir34 = os.path.join(root, "subdir3", ".subdir4") os.makedirs(subdir34) assert is_hidden(subdir34, root) assert is_hidden(subdir34) subdir56 = os.path.join(root, ".subdir5", "subdir6") os.makedirs(subdir56) 
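# The parent directory ".subdir5" is itself hidden, so is_hidden() reports the nested
# "subdir6" as hidden, while is_file_hidden() only inspects the final path component
# and therefore does not.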
assert is_hidden(subdir56, root) assert is_hidden(subdir56) assert not is_file_hidden(subdir56) assert not is_file_hidden(subdir56, os.stat(subdir56)) assert not is_file_hidden(os.path.join(root, "does_not_exist")) subdir78 = os.path.join(root, "subdir7", "subdir8") os.makedirs(subdir78) assert not is_hidden(subdir78, root) if hasattr(os, "chflags"): os.chflags(subdir78, UF_HIDDEN) assert is_hidden(subdir78, root) @pytest.mark.skipif( not ( sys.platform == "win32" and (("__pypy__" not in sys.modules) or (sys.implementation.version >= (7, 3, 6))) ), reason="only run on windows/cpython or pypy >= 7.3.6: https://foss.heptapod.net/pypy/pypy/-/issues/3469", ) def test_is_hidden_win32_cpython(): with tempfile.TemporaryDirectory() as root: subdir1 = os.path.join(root, "subdir") os.makedirs(subdir1) assert not is_hidden(subdir1, root) subprocess.check_call(["attrib", "+h", subdir1]) assert is_hidden(subdir1, root) assert is_file_hidden(subdir1) @pytest.mark.skipif( not ( sys.platform == "win32" and "__pypy__" in sys.modules and sys.implementation.version < (7, 3, 6) ), reason="only run on windows/pypy < 7.3.6: https://foss.heptapod.net/pypy/pypy/-/issues/3469", ) def test_is_hidden_win32_pypy(): import ctypes # noqa: F401 with tempfile.TemporaryDirectory() as root: subdir1 = os.path.join(root, "subdir") os.makedirs(subdir1) assert not is_hidden(subdir1, root) subprocess.check_call(["attrib", "+h", subdir1]) with warnings.catch_warnings(record=True) as w: # Cause all warnings to always be triggered. warnings.simplefilter("always") # Trigger a warning. assert not is_hidden(subdir1, root) # Verify the warning was triggered assert len(w) == 1 assert issubclass(w[-1].category, UserWarning) assert "hidden files are not detectable on this system" in str(w[-1].message) with warnings.catch_warnings(record=True) as w: # Cause all warnings to always be triggered. warnings.simplefilter("always") # Trigger a warning. 
assert not is_file_hidden(subdir1) # Verify the warning was triggered assert len(w) == 1 assert issubclass(w[-1].category, UserWarning) assert "hidden files are not detectable on this system" in str(w[-1].message) @pytest.mark.skipif(sys.platform != "win32", reason="only runs on windows") def test_win32_restrict_file_to_user_ctypes(tmp_path): _win32_restrict_file_to_user_ctypes(str(tmp_path)) @pytest.mark.skipif(sys.platform != "win32", reason="only runs on windows") def test_secure_write_win32(): def fetch_win32_permissions(filename): """Extracts file permissions on windows using icacls""" role_permissions = {} proc = os.popen("icacls %s" % filename) lines = proc.read().splitlines() proc.close() for index, line in enumerate(lines): if index == 0: line = line.split(filename)[-1].strip().lower() # noqa: PLW2901 match = re.match(r"\s*([^:]+):\(([^\)]*)\)", line) if match: usergroup, permissions = match.groups() usergroup = usergroup.lower().split("\\")[-1] permissions = {p.lower() for p in permissions.split(",")} role_permissions[usergroup] = permissions elif not line.strip(): break return role_permissions def check_user_only_permissions(fname): # Windows has it's own permissions ACL patterns username = os.environ["USERNAME"].lower() permissions = fetch_win32_permissions(fname) print(permissions) # for easier debugging assert username in permissions assert permissions[username] == {"r", "w", "d"} assert "administrators" in permissions assert permissions["administrators"] == {"f"} assert "everyone" not in permissions assert len(permissions) == 2 directory = tempfile.mkdtemp() fname = os.path.join(directory, "check_perms") try: with secure_write(fname) as f: f.write("test 1") check_user_only_permissions(fname) with open(fname, encoding="utf-8") as f: assert f.read() == "test 1" finally: shutil.rmtree(directory) @pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows") def test_secure_write_unix(): directory = tempfile.mkdtemp() fname = os.path.join(directory, "check_perms") try: with secure_write(fname) as f: f.write("test 1") mode = os.stat(fname).st_mode assert (stat.S_IMODE(mode) & 0o7677) == 0o0600 # tolerate owner-execute bit with open(fname, encoding="utf-8") as f: assert f.read() == "test 1" # Try changing file permissions ahead of time os.chmod(fname, 0o755) with secure_write(fname) as f: f.write("test 2") mode = os.stat(fname).st_mode assert (stat.S_IMODE(mode) & 0o7677) == 0o0600 # tolerate owner-execute bit with open(fname, encoding="utf-8") as f: assert f.read() == "test 2" finally: shutil.rmtree(directory) def test_exists(tmpdir): assert exists(str(tmpdir)) def test_insecure_write_warning(): with warnings.catch_warnings(): warnings.simplefilter("ignore", UserWarning) issue_insecure_write_warning() jupyter_core-5.7.2/tests/test_troubleshoot.py000066400000000000000000000003511457404620400215500ustar00rootroot00000000000000from __future__ import annotations from jupyter_core.troubleshoot import main def test_troubleshoot(capsys): """Smoke test the troubleshoot function""" main() out = capsys.readouterr().out assert "pip list" in out jupyter_core-5.7.2/tests/test_utils.py000066400000000000000000000025421457404620400201630ustar00rootroot00000000000000"""Tests for utils""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
from __future__ import annotations

import asyncio
import os
import tempfile

import pytest

from jupyter_core.utils import (
    deprecation,
    ensure_async,
    ensure_dir_exists,
    ensure_event_loop,
    run_sync,
)


def test_ensure_dir_exists():
    with tempfile.TemporaryDirectory() as td:
        ensure_dir_exists(td)
        ensure_dir_exists(os.path.join(str(td), "foo"), 0o777)


def test_deprecation():
    with pytest.deprecated_call():
        deprecation("foo")


async def afunc():
    return "afunc"


def func():
    return "func"


sync_afunc = run_sync(afunc)


def test_run_sync():
    async def foo():
        return 1

    foo_sync = run_sync(foo)
    assert foo_sync() == 1
    assert foo_sync() == 1
    ensure_event_loop().close()
    asyncio.set_event_loop(None)
    assert foo_sync() == 1
    ensure_event_loop().close()
    asyncio.run(foo())


def test_ensure_async():
    async def main():
        assert await ensure_async(afunc()) == "afunc"
        assert await ensure_async(func()) == "func"

    asyncio.run(main())


def test_ensure_event_loop():
    loop = ensure_event_loop()

    async def inner():
        return asyncio.get_running_loop()

    inner_sync = run_sync(inner)
    assert inner_sync() == loop
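

# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original test suite): the tests above
# exercise run_sync() and ensure_async() separately; the example below combines
# them.  run_sync() wraps a coroutine function so it can be called from plain
# synchronous code, and ensure_async() lets a coroutine accept either a plain
# value or an awaitable.  The helper names `fetch`, `describe_fetch`, and
# `example_mixed_sync_async` are invented for illustration; only the
# jupyter_core.utils helpers themselves come from the package.


async def fetch(value):
    """Pretend to do asynchronous work and hand the value back."""
    await asyncio.sleep(0)
    return value


async def describe_fetch(source):
    """Accept either an awaitable or a plain value via ensure_async()."""
    result = await ensure_async(source)
    return f"fetched: {result}"


def example_mixed_sync_async():
    """Call the async helpers from synchronous code using run_sync()."""
    describe_sync = run_sync(describe_fetch)
    # A plain value and a coroutine object are both accepted by ensure_async().
    assert describe_sync("plain") == "fetched: plain"
    assert describe_sync(fetch("awaited")) == "fetched: awaited"


if __name__ == "__main__":
    example_mixed_sync_async()
    print("example_mixed_sync_async passed")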