napari-0.5.0a1/.devcontainer/Dockerfile
# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.233.0/containers/python-3-miniconda/.devcontainer/base.Dockerfile
FROM mcr.microsoft.com/vscode/devcontainers/miniconda:0-3
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
&& apt-get -y install --no-install-recommends \
libxcb-icccm4 libxcb-image0 libxcb-keysyms1 libxcb-randr0 \
libxcb-render-util0 libxcb-xinerama0 libxkbcommon-x11-0
napari-0.5.0a1/.devcontainer/add-notice.sh
# Display a notice when not running in GitHub Codespaces
cat << 'EOF' > /usr/local/etc/vscode-dev-containers/conda-notice.txt
When using "conda" from outside of GitHub Codespaces, note the Anaconda repository
contains restrictions on commercial use that may impact certain organizations. See
https://aka.ms/vscode-remote/conda/miniconda
EOF
notice_script="$(cat << 'EOF'
if [ -t 1 ] && [ "${IGNORE_NOTICE}" != "true" ] && [ "${TERM_PROGRAM}" = "vscode" ] && [ "${CODESPACES}" != "true" ] && [ ! -f "$HOME/.config/vscode-dev-containers/conda-notice-already-displayed" ]; then
cat "/usr/local/etc/vscode-dev-containers/conda-notice.txt"
mkdir -p "$HOME/.config/vscode-dev-containers"
((sleep 10s; touch "$HOME/.config/vscode-dev-containers/conda-notice-already-displayed") &)
fi
EOF
)"
echo "${notice_script}" | tee -a /etc/bash.bashrc >> /etc/zsh/zshrc
napari-0.5.0a1/.devcontainer/devcontainer.json
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
// https://github.com/microsoft/vscode-dev-containers/tree/v0.233.0/containers/python-3-miniconda
{
"name": "Miniconda (Python 3)",
"build": {
"context": "..",
"dockerfile": "Dockerfile",
"args": {
"NODE_VERSION": "none"
}
},
// Set *default* container specific settings.json values on container create.
"settings": {
"python.defaultInterpreterPath": "/opt/conda/bin/python",
"python.linting.enabled": true,
"python.linting.mypyEnabled": true,
"python.linting.flake8Enabled": true,
"python.formatting.blackPath": "/opt/conda/bin/black",
"python.linting.flake8Path": "/opt/conda/bin/flake8",
"python.linting.mypyPath": "/opt/conda/bin/mypy",
},
// Add the IDs of extensions you want installed when the container is created.
"extensions": [
"ms-python.python",
"ms-python.vscode-pylance"
],
// Use 'forwardPorts' to make a list of ports inside the container available locally.
"forwardPorts": [5900, 5901, 6080],
// Use 'postCreateCommand' to run commands after the container is created.
"postCreateCommand": "pip install -U pip && pip install -e .[pyqt, dev] && pre-commit install",
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "vscode",
"features": {
"git": "os-provided",
"github-cli": "latest",
"desktop-lite": {
"password": "napari",
"webPort": "6080",
"vncPort": "5901"
}
}
}
napari-0.5.0a1/.env_sample
# TO USE THIS FILE RENAME IT TO '.env'
# NOTE! Using this file requires `pip install python-dotenv`
# ──────────────────────────────────────────────────────────────
# Event Debugging, controls events.debugging.EventDebugSettings:
NAPARI_DEBUG_EVENTS=0
# these are strict json, use double quotes
# if INCLUDE_X is used, EXCLUDE_X is ignored.
EVENT_DEBUG_INCLUDE_EMITTERS = [] # e.g. ["Points", "Selection"]
EVENT_DEBUG_EXCLUDE_EMITTERS = ["TransformChain", "Context"]
EVENT_DEBUG_INCLUDE_EVENTS = [] # e.g. ["set_data", "changed"]
EVENT_DEBUG_EXCLUDE_EVENTS = ["status", "position"]
EVENT_DEBUG_STACK_DEPTH = 20
# ──────────────────────────────────────────────────────────────
# _PYTEST_RAISE=1 will prevent pytest from handling exceptions.
# Use with a debugger that's set to break on "unhandled exceptions".
# https://github.com/pytest-dev/pytest/issues/7409
_PYTEST_RAISE=0
# set to 1 to simulate continuous integration (CI) test runs
CI=0
# set to 1 to allow tests that pop up a viewer or widget
NAPARI_POPUP_TESTS=0
# ──────────────────────────────────────────────────────────────
# You can also use any of the (nested) fields from NapariSettings
# for example:
# NAPARI_APPEARANCE_THEME='light'
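A hedged aside on the `.env_sample` file above: the variables it defines only take effect once something loads them into the environment. The sketch below shows one way to pick them up with `python-dotenv` (the package the note at the top of the file asks you to install); it uses the variable names defined above but is not napari's own loading code.

```python
# Minimal sketch (assumes `pip install python-dotenv` and that the file has
# been renamed to `.env` in the working directory, as the notes above say).
import os

from dotenv import load_dotenv

load_dotenv()  # reads `.env` and exports its variables into os.environ

# Read one of the settings defined above, falling back to its documented default.
debug_events = os.getenv("NAPARI_DEBUG_EVENTS", "0")
print(f"Event debugging enabled: {debug_events == '1'}")
```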
napari-0.5.0a1/.gitattributes
napari_gui/_version.py export-subst
napari-0.5.0a1/.github/CODEOWNERS
napari-0.5.0a1/.github/FUNDING.yml
github: numfocus
custom: http://numfocus.org/donate-to-napari
napari-0.5.0a1/.github/ISSUE_TEMPLATE/bug_report.md
---
name: "\U0001F41B Bug Report"
about: Submit a bug report to help us improve napari
title: ''
labels: bug
assignees: ''
---
## 🐛 Bug
## To Reproduce
Steps to reproduce the behavior:
1.
2.
3.
## Expected behavior
## Environment
- Please copy and paste the information from the "napari info" option in the Help menubar here:
- Any other relevant information:
## Additional context
napari-0.5.0a1/.github/ISSUE_TEMPLATE/config.yml
# Ref: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository#configuring-the-template-chooser
blank_issues_enabled: true # default
contact_links:
- name: 🤷💻 napari forum
url: https://forum.image.sc/tag/napari
about: |
Please ask general "how do I ... ?" questions over at image.sc
- name: '💬 napari @ zulip'
url: https://napari.zulipchat.com/
about: Chat with devs
napari-0.5.0a1/.github/ISSUE_TEMPLATE/design_related.md
---
name: "\U00002728 Design Related"
about: Capture needs specific to design and user experience research
title: ''
labels: design
assignees: liaprins-czi
---
### Overview of design need
- Is there an existing GitHub issue this design work pertains to? If so, provide a link to it
- Also link to any specific comments or threads where the problem to be solved by design is mentioned
- In a sentence or two, describe the problem to be solved for users
### What level of design is needed? (Choose all that apply)
_This section may be updated by the designer / UX researcher working on this issue_
- [ ] **User experience research:** high-level recommendation/exploration of user needs, design heuristics, and / or best practices to inform a design experience
(Use this option when you feel there’s a challenge to be solved, but you’re curious about what the experience should be — may involve research studies to understand challenges/opportunities for design)
- [ ] **Information flow / conceptual:** organizing and structuring of information flow and content, including layout on screen or across multiple steps
- [ ] **Visual:** creating mockups, icons, etc
(If choosing this level alone, it means that the content to be mocked up and its organization is already known and specified)
### Is design a blocker?
- [ ] **Yes:** engineering cannot proceed without a design first
- [ ] **No:** engineering can create a first version, and design can come in later to iterate and refine
If selecting **Yes**, how much design input is needed to unblock engineering? For example, is a full, final visual design needed, or just a recommendation of which conceptual direction to go?
napari-0.5.0a1/.github/ISSUE_TEMPLATE/documentation.md
---
name: "\U0001F4DA Documentation"
about: Report an issue with napari documentation
title: ''
labels: documentation
assignees: ''
---
## 📚 Documentation
napari-0.5.0a1/.github/ISSUE_TEMPLATE/feature_request.md
---
name: "\U0001F680 Feature Request"
about: Submit a proposal/request for a new napari feature
title: ''
labels: feature
assignees: ''
---
## 🚀 Feature
## Motivation
## Pitch
## Alternatives
## Additional context
napari-0.5.0a1/.github/ISSUE_TEMPLATE/task.md
---
name: "\U0001F9F0 Task"
about: Submit a task or maintenance item for napari
title: ''
labels: task
assignees: ''
---
## 🧰 Task
napari-0.5.0a1/.github/PULL_REQUEST_TEMPLATE.md
# Description
## Type of change
- [ ] Bug-fix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
- [ ] This change requires a documentation update
# References
# How has this been tested?
- [ ] example: the test suite for my feature covers cases x, y, and z
- [ ] example: all tests pass with my change
- [ ] example: I checked that my changes work with both PySide and PyQt backends,
as there are small differences between the two Qt bindings.
## Final checklist:
- [ ] My PR is the minimum possible work for the desired functionality
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] I have made corresponding changes to the documentation
- [ ] I have added tests that prove my fix is effective or that my feature works
- [ ] If I included new strings, I have used `trans.` to make them localizable.
For more information see our [translations guide](https://napari.org/developers/translations.html).
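As a hedged illustration of the `trans.` checklist item above (and of the semgrep rules in `test_pull_requests.yml` further down, which reject `trans._(f"...")` and `trans._(x.format(...))`): new user-facing strings go through napari's translation helper, with interpolation passed as keyword arguments. The import path below is `napari.utils.translations`; treat the exact call as a sketch rather than the full guidance, which lives at the translations guide linked above.

```python
# Hedged sketch of a localizable string (assumes a napari dev install).
from napari.utils.translations import trans

n_layers = 3

# Preferred: keep the template intact so translators see the placeholder.
message = trans._("Added {n} layers", n=n_layers)

# Rejected by the repository's semgrep checks: f-strings or .format() inside
# trans._ bake the value into the string before it can be translated.
# message = trans._(f"Added {n_layers} layers")
print(message)
```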
napari-0.5.0a1/.github/TEST_FAIL_TEMPLATE.md
---
title: "{{ env.TITLE }}"
labels: [bug]
---
The {{ workflow }} workflow failed on {{ date | date("YYYY-MM-DD HH:mm") }} UTC
The most recent failing test was on {{ env.PLATFORM }} py{{ env.PYTHON }} {{ env.BACKEND }}
with commit: {{ sha }}
Full run: https://github.com/napari/napari/actions/runs/{{ env.RUN_ID }}
(This post will be updated if another test fails, as long as this issue remains open.)
napari-0.5.0a1/.github/dependabot.yml
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"
commit-message:
prefix: "ci(dependabot):"
- package-ecosystem: "pip"
directory: "/resources"
schedule:
interval: "weekly"
target-branch: "develop"
napari-0.5.0a1/.github/labeler.yml
# See: .github/workflows/labeler.yml and https://github.com/marketplace/actions/labeler
design:
- 'napari/_qt/qt_resources/**/*'
preferences:
- 'napari/_qt/**/*/preferences_dialog.py'
- 'napari/settings/**/*.py'
qt:
- 'napari/_qt/**/*.py'
- 'napari/qt/**/*.py'
task:
- '.github/**/*'
tests:
- '**/*/_tests/**/*.py'
vispy:
- 'napari/_vispy'
napari-0.5.0a1/.github/missing_translations.md
---
title: "[Automatic issue] Missing `trans._()` calls."
labels: "good first issue"
---
It looks like one of our test cron jobs detected missing translations.
You can see the latest output [here](https://github.com/napari/napari/actions/workflows/test_translations.yml).
There are likely new strings to either ignore, or to internationalise.
You can also update the cron script so that this issue is reported with better information.
Note that this issue will be automatically updated if kept open, or a new one will be created when necessary, if no open
issue is found and new `trans._()` calls are missing.
napari-0.5.0a1/.github/workflows/auto_author_assign.yml
# https://github.com/marketplace/actions/auto-author-assign
name: 'Auto Author Assign'
on:
pull_request_target:
types: [opened, reopened]
permissions:
pull-requests: write
jobs:
assign-author:
runs-on: ubuntu-latest
steps:
- uses: toshimaru/auto-author-assign@v1.6.1
napari-0.5.0a1/.github/workflows/benchmarks.yml
# This CI configuration for relative benchmarks is based on the research done
# for scikit-image's implementation available here:
# https://github.com/scikit-image/scikit-image/blob/9bdd010a8/.github/workflows/benchmarks.yml#L1
# Blog post with the rationale: https://labs.quansight.org/blog/2021/08/github-actions-benchmarks/
name: Benchmarks
on:
pull_request:
types: [labeled]
schedule:
- cron: "6 6 * * 0" # every sunday
workflow_dispatch:
inputs:
base_ref:
description: "Baseline commit or git reference"
required: true
contender_ref:
description: "Contender commit or git reference"
required: true
# This is the main configuration section that needs to be fine tuned to napari's needs
# All the *_THREADS options are just there to make the benchmarks more robust by disabling parallelism
env:
OPENBLAS_NUM_THREADS: "1"
MKL_NUM_THREADS: "1"
OMP_NUM_THREADS: "1"
ASV_OPTIONS: "--split --show-stderr --factor 1.5 --attribute timeout=300"
# --split -> split final reports in tables
# --show-stderr -> print tracebacks if errors occur
# --factor 1.5 -> report anomaly if tested timings are beyond 1.5x base timings
# --attribute timeout=300 -> override timeout attribute (default=60s) to allow slow tests to run
# see https://asv.readthedocs.io/en/stable/commands.html#asv-continuous for more details!
jobs:
benchmark:
if: ${{ github.event.label.name == 'run-benchmarks' && github.event_name == 'pull_request' || github.event_name == 'schedule' || github.event_name == 'workflow_dispatch' }}
name: ${{ matrix.benchmark-name }}
runs-on: ${{ matrix.runs-on }}
strategy:
fail-fast: false
matrix:
include:
- benchmark-name: Qt
asv-command: continuous
selection-regex: "^benchmark_qt_.*"
runs-on: macos-latest
# Qt tests run on macOS to avoid using Xvfb business
# xvfb makes everything run, but some tests segfault :shrug:
# Fortunately, macOS graphics stack does not need xvfb!
- benchmark-name: non-Qt
asv-command: continuous
selection-regex: "^benchmark_(?!qt_).*"
runs-on: ubuntu-latest
steps:
# We need the full repo to avoid this issue
# https://github.com/actions/checkout/issues/23
- uses: actions/checkout@v3
with:
fetch-depth: 0
- uses: actions/setup-python@v4
name: Install Python
with:
python-version: "3.9"
cache-dependency-path: setup.cfg
- uses: tlambert03/setup-qt-libs@v1
- name: Setup asv
run: python -m pip install asv virtualenv
- uses: octokit/request-action@v2.x
id: latest_release
with:
route: GET /repos/{owner}/{repo}/releases/latest
owner: napari
repo: napari
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Run ${{ matrix.benchmark-name }} benchmarks
id: run_benchmark
env:
# asv will checkout commits, which might contain LFS artifacts; ignore those errors since
# they are probably just documentation PNGs not needed here anyway
GIT_LFS_SKIP_SMUDGE: 1
run: |
set -euxo pipefail
# ID this runner
asv machine --yes
if [[ $GITHUB_EVENT_NAME == pull_request ]]; then
EVENT_NAME="PR #${{ github.event.pull_request.number }}"
BASE_REF=${{ github.event.pull_request.base.sha }}
CONTENDER_REF=${GITHUB_SHA}
echo "Baseline: ${BASE_REF} (${{ github.event.pull_request.base.label }})"
echo "Contender: ${CONTENDER_REF} (${{ github.event.pull_request.head.label }})"
elif [[ $GITHUB_EVENT_NAME == schedule ]]; then
EVENT_NAME="cronjob"
BASE_REF="${{ fromJSON(steps.latest_release.outputs.data).target_commitish }}"
CONTENDER_REF="${GITHUB_SHA}"
echo "Baseline: ${BASE_REF} (${{ fromJSON(steps.latest_release.outputs.data).tag_name }})"
echo "Contender: ${CONTENDER_REF} (current main)"
elif [[ $GITHUB_EVENT_NAME == workflow_dispatch ]]; then
EVENT_NAME="manual trigger"
BASE_REF="${{ github.event.inputs.base_ref }}"
CONTENDER_REF="${{ github.event.inputs.contender_ref }}"
echo "Baseline: ${BASE_REF} (workflow input)"
echo "Contender: ${CONTENDER_REF} (workflow input)"
fi
echo "EVENT_NAME=$EVENT_NAME" >> $GITHUB_ENV
echo "BASE_REF=$BASE_REF" >> $GITHUB_ENV
echo "CONTENDER_REF=$CONTENDER_REF" >> $GITHUB_ENV
# Run benchmarks for current commit against base
asv continuous $ASV_OPTIONS -b '${{ matrix.selection-regex }}' ${BASE_REF} ${CONTENDER_REF} \
| sed -E "/Traceback | failed$|PERFORMANCE DECREASED/ s/^/::error:: /" \
| tee asv_continuous.log
# Report and export results for subsequent steps
if grep "Traceback \|failed\|PERFORMANCE DECREASED" asv_continuous.log > /dev/null ; then
exit 1
fi
- name: Report Failures as Issue
if: ${{ (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch') && failure() }}
uses: JasonEtco/create-an-issue@v2
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PLATFORM: ${{ matrix.runs-on }}
PYTHON: "3.9"
BACKEND: ${{ matrix.benchmark-name }}
RUN_ID: ${{ github.run_id }}
TITLE: "[test-bot] Benchmark tests failing"
with:
filename: .github/TEST_FAIL_TEMPLATE.md
update_existing: true
- name: Add more info to artifact
if: always()
run: |
# Copy the full `asv continuous` log
cp asv_continuous.log .asv/results/asv_continuous_${{ matrix.benchmark-name }}.log
# ensure that even if this isn't a PR, the benchmark_report workflow can run without error
touch .asv/results/message_${{ matrix.benchmark-name }}.txt
# Add the message that might be posted as a comment on the PR
# We delegate the actual comment to `benchmarks_report.yml` due to
# potential token permissions issues
if [[ $GITHUB_EVENT_NAME == pull_request ]]; then
echo "${{ github.event.pull_request.number }}" > .asv/results/pr_number
echo \
"The ${{ matrix.benchmark-name }} benchmark run requested by $EVENT_NAME ($CONTENDER_REF vs $BASE_REF) has" \
"finished with status '${{ steps.run_benchmark.outcome }}'. See the" \
"[CI logs and artifacts](||BENCHMARK_CI_LOGS_URL||) for further details." \
> .asv/results/message_${{ matrix.benchmark-name }}.txt
fi
- uses: actions/upload-artifact@v3
if: always()
with:
name: asv-benchmark-results-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
path: .asv/results
napari-0.5.0a1/.github/workflows/benchmarks_report.yml
# Report benchmark results to the PR
# We need a dual workflow to make sure the token has the needed permissions to post comments
# See https://stackoverflow.com/a/71683208 for more details
# When this workflow is triggered, it pulls the latest version of this file on
# the default branch. Changes to this file won't be reflected until after the
# PR is merged.
# https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_run
name: "Benchmarks - Report"
on:
workflow_run:
workflows: [Benchmarks]
types:
- completed
jobs:
download:
runs-on: ubuntu-latest
steps:
- name: "Download artifact"
uses: actions/github-script@v6
with:
script: |
let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: context.payload.workflow_run.id,
});
let artifactName = `asv-benchmark-results-${context.payload.workflow_run.id}-${context.payload.workflow_run.run_number}-${context.payload.workflow_run.run_attempt}`
let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => {
return artifact.name == artifactName
})[0];
let download = await github.rest.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: 'zip',
});
let fs = require('fs');
fs.writeFileSync(`${process.env.GITHUB_WORKSPACE}/asv_results.zip`, Buffer.from(download.data));
- name: Unzip and prepare data
run: |
unzip asv_results.zip
# combine the Qt and non-Qt messages
cat message_Qt.txt message_non-Qt.txt > message.txt
- name: Replace URLs
run: |
sed -i 's@||BENCHMARK_CI_LOGS_URL||@${{ github.event.workflow_run.html_url }}@g' message.txt
- name: Collect PR number if available
run: |
if [[ -f pr_number ]]; then
echo "PR_NUMBER=$(cat pr_number)" >> $GITHUB_ENV
fi
- name: "Comment on PR"
if: env.PR_NUMBER != ''
uses: actions/github-script@v6
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
let fs = require('fs');
let issue_number = Number(process.env.PR_NUMBER);
let body = fs.readFileSync('message.txt', 'utf8');
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: issue_number,
body: body,
});
napari-0.5.0a1/.github/workflows/build_docs.yml
name: Build PR Docs
on:
pull_request:
branches:
- main
push:
branches:
- docs
tags:
- 'v*'
workflow_dispatch:
jobs:
build-and-upload:
name: Build & Upload Artifact
runs-on: ubuntu-latest
steps:
- name: Clone docs repo
uses: actions/checkout@v3
with:
path: docs # place in a named directory
repository: napari/docs
- name: Clone main repo
uses: actions/checkout@v3
with:
path: napari-repo
- name: Copy examples to docs folder
run: |
cp -R napari-repo/examples docs
- uses: actions/setup-python@v4
with:
python-version: 3.9
cache-dependency-path: setup.cfg
- uses: tlambert03/setup-qt-libs@v1
- name: Install Dependencies
run: |
python -m pip install --upgrade pip
python -m pip install "napari-repo/[all]"
- name: Testing
run: |
python -c 'import napari; print(napari.__version__)'
python -c 'import napari.layers; print(napari.layers.__doc__)'
- name: Build Docs
uses: aganders3/headless-gui@v1
env:
GOOGLE_CALENDAR_ID: ${{ secrets.GOOGLE_CALENDAR_ID }}
GOOGLE_CALENDAR_API_KEY: ${{ secrets.GOOGLE_CALENDAR_API_KEY }}
with:
# the napari-docs repo is cloned into a docs/ folder, hence the
# invocation below. Locally, you should simply run make docs
run: make -C docs docs GALLERY_PATH=../examples/
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: docs
path: docs/docs/_build
napari-0.5.0a1/.github/workflows/deploy_docs.yml
name: Build Docs
on:
push:
branches:
- main
workflow_dispatch:
concurrency:
group: docs-${{ github.ref }}
cancel-in-progress: true
jobs:
build-napari-docs:
name: Build docs on napari/docs
runs-on: ubuntu-latest
steps:
- name: Trigger workflow and wait
uses: convictional/trigger-workflow-and-wait@v1.6.5
with:
owner: napari
repo: docs
github_token: ${{ secrets.ACTIONS_DEPLOY_DOCS }}
workflow_file_name: deploy_docs.yml
trigger_workflow: true
wait_workflow: true
napari-0.5.0a1/.github/workflows/docker-singularity-publish.yml
name: Docker and Singularity build
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.
on:
workflow_dispatch:
# schedule:
# - cron: '31 0 * * *'
push:
branches: [ main ]
# Publish semver tags as releases.
tags: [ 'v*.*.*' ]
env:
# Use docker.io for Docker Hub if empty
REGISTRY: ghcr.io
jobs:
build1:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
strategy:
fail-fast: false
matrix:
include:
- recipe: Docker
target: napari
image-name: napari/napari
- recipe: Docker
target: napari-xpra
image-name: napari/napari-xpra
steps:
- name: Checkout repository
uses: actions/checkout@v3
# Login against a Docker registry except on PR
# https://github.com/docker/login-action
- name: Log into registry ${{ env.REGISTRY }}
if: github.event_name != 'pull_request'
uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
# Extract metadata (tags, labels) for Docker
# https://github.com/docker/metadata-action
# https://github.com/docker/build-push-action/blob/master/docs/advanced/tags-labels.md
- name: Extract Docker metadata
id: meta
uses: docker/metadata-action@v4
with:
# list of Docker images to use as base name for tags
images: ${{ env.REGISTRY }}/${{ matrix.image-name }}
# images: |
# name/app
# ghcr.io/username/app
# generate Docker tags based on the following events/attributes
tags: |
type=schedule
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
type=sha
latest
# oras://ghcr.io is tagged latest too, and seems to override the docker://ghcr.io tag -> race condition?
# Build and push Docker image with Buildx (don't push on PR)
# https://github.com/docker/build-push-action
- name: Build and push Docker image
uses: docker/build-push-action@v4
with:
context: .
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
file: "dockerfile"
target: ${{ matrix.target }}
# ----
build2:
needs: build1
runs-on: ubuntu-latest
container:
image: quay.io/singularity/docker2singularity:v3.10.0
options: --privileged
permissions:
contents: read
packages: write
strategy:
fail-fast: false
matrix:
recipe: ["Singularity"]
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Continue if Singularity Recipe Exists
run: |
if [[ -f "${{ matrix.recipe }}" ]]; then
echo "keepgoing=true" >> $GITHUB_ENV
fi
- name: Build Container
if: ${{ env.keepgoing == 'true' }}
env:
recipe: ${{ matrix.recipe }}
run: |
ls
if [ -f "${{ matrix.recipe }}" ]; then
singularity build container.sif ${{ matrix.recipe }}
tag=latest
fi
# Build the container and name by tag
echo "Tag is $tag."
echo "tag=$tag" >> $GITHUB_ENV
- name: Login and Deploy Container
if: (github.event_name != 'pull_request')
env:
keepgoing: ${{ env.keepgoing }}
run: |
if [[ "${keepgoing}" == "true" ]]; then
echo ${{ secrets.GITHUB_TOKEN }} | singularity remote login -u ${{ secrets.GHCR_USERNAME }} --password-stdin oras://ghcr.io
singularity push container.sif oras://ghcr.io/${GITHUB_REPOSITORY}:${tag}
fi
napari-0.5.0a1/.github/workflows/labeler.yml
# https://github.com/marketplace/actions/labeler
name: "Pull Request Labeler"
on:
- pull_request_target
jobs:
triage:
runs-on: ubuntu-latest
steps:
- uses: actions/labeler@main
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
napari-0.5.0a1/.github/workflows/make_bundle.yml
on:
push:
# Sequence of patterns matched against refs/tags
tags:
- "v*" # Push events to matching v*, i.e. v1.0, v20.15.10
branches:
- main
pull_request:
branches:
- main
paths-ignore:
- 'docs/**'
schedule:
- cron: "0 0 * * *"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
concurrency:
group: create-bundle-${{ github.ref }}
cancel-in-progress: true
name: Create Bundle
jobs:
bundle:
name: Bundle ${{ matrix.platform }}
runs-on: ${{ matrix.platform }}
if: github.repository == 'napari/napari'
env:
GITHUB_TOKEN: ${{ github.token }}
DISPLAY: ":99.0"
strategy:
fail-fast: false
matrix:
include:
- platform: ubuntu-18.04
python-version: "3.9"
- platform: macos-latest
python-version: "3.9"
- platform: windows-latest
python-version: "3.8"
steps:
- name: Checkout code
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install Python
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
cache-dependency-path: setup.cfg
- name: Install Dependencies
run: |
python -m pip install --upgrade pip
python -m pip install -e '.[bundle_build]'
- name: get tag / arch-suffix
shell: bash
run: |
VER=`python bundle.py --version`
echo "version=${VER}" >> $GITHUB_ENV
echo "Version: $VER"
ARCH_SUFFIX=`python bundle.py --arch`
echo "arch-suffix=${ARCH_SUFFIX}" >> $GITHUB_ENV
echo "Machine: ${ARCH_SUFFIX}"
- name: Make Bundle (Linux)
if: runner.os == 'Linux'
run: |
sudo apt-get update
sudo apt-get install -y libdbus-1-3 libxkbcommon-x11-0 libxcb-icccm4 \
libxcb-image0 libxcb-keysyms1 libxcb-randr0 libxcb-render-util0 \
libxcb-xinerama0 libxcb-xfixes0 libxcb-shape0 libqt5gui5
xvfb-run --auto-servernum python bundle.py
- name: Make Bundle (Windows & MacOS)
if: runner.os != 'Linux'
run: python bundle.py
- name: Upload Artifact
uses: actions/upload-artifact@v3
with:
name: napari-${{ env.version }}-${{ runner.os }}-${{ env.arch-suffix }}.zip
path: napari-${{ env.version }}-${{ runner.os }}-${{ env.arch-suffix }}.zip
- name: Get Release
if: startsWith(github.ref, 'refs/tags/v')
id: get_release
uses: bruceadams/get-release@v1.3.2
- name: Upload Release Asset
if: startsWith(github.ref, 'refs/tags/v')
uses: actions/upload-release-asset@v1
with:
upload_url: ${{ steps.get_release.outputs.upload_url }}
asset_path: napari-${{ env.version }}-${{ runner.os }}-${{ env.arch-suffix }}.zip
asset_name: napari-${{ env.version }}-${{ runner.os }}-${{ env.arch-suffix }}.zip
asset_content_type: application/zip
- name: Upload Nightly Build Asset
if: ${{ github.event_name == 'schedule' }}
uses: WebFreak001/deploy-nightly@v2.0.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
# nightly build release from https://api.github.com/repos/napari/napari/releases
upload_url: https://uploads.github.com/repos/napari/napari/releases/34273071/assets{?name,label}
release_id: 34273071
asset_path: napari-${{ env.version }}-${{ runner.os }}-${{ env.arch-suffix }}.zip
asset_name: napari-${{ runner.os }}-${{ env.arch-suffix }}.zip
asset_content_type: application/zip
max_releases: 1
- name: Update latest tag
uses: EndBug/latest-tag@latest
if: ${{ github.event_name == 'schedule' }}
with:
description: latest code released from nightly build
tag-name: latest
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
napari-0.5.0a1/.github/workflows/make_bundle_conda.yml
name: Conda
on:
push:
# Sequence of patterns matched against refs/tags
tags:
- "v*" # Push events to matching v*, i.e. v1.0, v20.15.10
schedule:
- cron: "0 0 * * *"
# workflow_dispatch: # go to napari/packaging to trigger manual runs
jobs:
packaging:
uses: napari/packaging/.github/workflows/make_bundle_conda.yml@main
secrets: inherit
with:
event_name: ${{ github.event_name }}
napari-0.5.0a1/.github/workflows/make_release.yml
on:
push:
# Sequence of patterns matched against refs/tags
tags:
- 'v*' # Push events to matching v*, i.e. v1.0, v20.15.10
name: Create Release
jobs:
build:
name: Create Release
runs-on: ubuntu-latest
if: github.repository == 'napari/napari'
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Install Python
uses: actions/setup-python@v4
with:
python-version: 3.9
cache-dependency-path: setup.cfg
- name: Install Dependencies
run: |
python -m pip install --upgrade pip
python -m pip install -e .[build] # need full install so we can build type stubs
- name: Build Distribution
run: make dist
- name: Find Release Notes
id: release_notes
run: |
TAG="${GITHUB_REF/refs\/tags\/v/}" # clean tag
if [[ "$TAG" != *"rc"* ]]; then
VER="${TAG/rc*/}" # remove pre-release identifier
RELEASE_NOTES="$(cat docs/release/release_${VER//./_}.md)"
# https://github.community/t5/GitHub-Actions/set-output-Truncates-Multiline-Strings/m-p/38372/highlight/true#M3322
RELEASE_NOTES="${RELEASE_NOTES//'%'/'%25'}"
RELEASE_NOTES="${RELEASE_NOTES//$'\n'/'%0A'}"
RELEASE_NOTES="${RELEASE_NOTES//$'\r'/'%0D'}"
else
RELEASE_NOTES="pre-release $TAG"
fi
echo "tag=${TAG}" >> $GITHUB_ENV
# https://help.github.com/en/actions/reference/workflow-commands-for-github-actions
echo "::set-output name=contents::$RELEASE_NOTES"
- name: Create Release
id: create_release
uses: actions/create-release@latest
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # This token is provided by Actions, you do not need to create your own token
with:
tag_name: ${{ github.ref }}
release_name: ${{ env.tag }}
body: ${{ steps.release_notes.outputs.contents }}
draft: false
prerelease: ${{ contains(github.ref, 'rc') }}
- name: Upload Release Asset
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./dist/napari-${{ env.tag }}.tar.gz
asset_name: napari-${{ env.tag }}.tar.gz
asset_content_type: application/gzip
- name: Publish PyPI Package
uses: pypa/gh-action-pypi-publish@master
with:
user: __token__
password: ${{ secrets.pypi_password }}
napari-0.5.0a1/.github/workflows/test_comprehensive.yml
# The comprehensive test suite, which is run any time anything is merged into main.
# See test_pull_request.yml for the tests that will be run
name: Comprehensive Test
on:
push:
branches:
- main
- "v*x"
tags:
- "v*" # Push events to matching v*, i.e. v1.0, v20.15.10
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
concurrency:
group: comprehensive-test
jobs:
manifest:
name: Check Manifest
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.8
uses: actions/setup-python@v4
with:
python-version: 3.8
- name: Install dependencies
run: |
pip install --upgrade pip
pip install check-manifest
- name: Check Manifest
run: check-manifest
test:
name: ${{ matrix.platform }} py${{ matrix.python }} ${{ matrix.toxenv }} ${{ matrix.MIN_REQ && 'min_req' }}
runs-on: ${{ matrix.platform }}
strategy:
fail-fast: false
matrix:
platform: [ubuntu-latest, windows-latest]
python: ["3.8", "3.9", "3.10"]
backend: [pyqt5, pyside2]
include:
- python: 3.9
platform: macos-latest
backend: pyqt5
# test with minimum specified requirements
- python: 3.8
platform: ubuntu-18.04
backend: pyqt5
MIN_REQ: 1
# test with --async_only
- python: 3.8
platform: ubuntu-18.04
toxenv: async-py38-linux-pyqt5
# test without any Qt backends
- python: 3.8
platform: ubuntu-18.04
toxenv: headless-py38-linux
steps:
- name: Cancel Previous Runs
uses: styfle/cancel-workflow-action@0.11.0
with:
access_token: ${{ github.token }}
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python }}
cache: "pip"
cache-dependency-path: setup.cfg
- uses: tlambert03/setup-qt-libs@v1
# strategy borrowed from vispy for installing opengl libs on windows
- name: Install Windows OpenGL
if: runner.os == 'Windows'
run: |
git clone --depth 1 https://github.com/pyvista/gl-ci-helpers.git
powershell gl-ci-helpers/appveyor/install_opengl.ps1
if (Test-Path -Path "C:\Windows\system32\opengl32.dll" -PathType Leaf) {Exit 0} else {Exit 1}
- name: Install dependencies
run: |
pip install --upgrade pip
pip install setuptools tox tox-gh-actions
python tools/minreq.py # no-op if MIN_REQ is not set
env:
MIN_REQ: ${{ matrix.MIN_REQ }}
# here we pass off control of environment creation and running of tests to tox
# tox-gh-actions, installed above, helps to convert environment variables into
# tox "factors" ... limiting the scope of what gets tested on each platform
# The one exception is if the "toxenv" environment variable has been set,
# in which case we are declaring one specific tox environment to run.
# see tox.ini for more
- name: Test with tox
uses: aganders3/headless-gui@v1
with:
run: python -m tox
env:
PLATFORM: ${{ matrix.platform }}
BACKEND: ${{ matrix.backend }}
TOXENV: ${{ matrix.toxenv }}
NUMPY_EXPERIMENTAL_ARRAY_FUNCTION: ${{ matrix.MIN_REQ || 1 }}
PYVISTA_OFF_SCREEN: True
MIN_REQ: ${{ matrix.MIN_REQ }}
- name: Coverage
uses: codecov/codecov-action@v3
- name: Report Failures
if: ${{ failure() }}
uses: JasonEtco/create-an-issue@v2
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PLATFORM: ${{ matrix.platform }}
PYTHON: ${{ matrix.python }}
BACKEND: ${{ matrix.toxenv }}
RUN_ID: ${{ github.run_id }}
TITLE: "[test-bot] Comprehensive tests failing"
with:
filename: .github/TEST_FAIL_TEMPLATE.md
update_existing: true
test_pip_install:
name: ubuntu-latest 3.9 pip install
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
path: napari-from-github
- name: Set up Python 3.9
uses: actions/setup-python@v4
with:
python-version: 3.9
cache: "pip"
cache-dependency-path: napari-from-github/setup.cfg
- uses: tlambert03/setup-qt-libs@v1
- name: Install this commit
run: |
pip install --upgrade pip
pip install ./napari-from-github[all,testing]
- name: Test
uses: aganders3/headless-gui@v1
with:
run: python -m pytest --pyargs napari --color=yes
test_examples:
name: test examples
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: 3.9
- uses: tlambert03/setup-qt-libs@v1
- name: Install this commit
run: |
pip install --upgrade pip
pip install setuptools tox tox-gh-actions
- name: Test
uses: aganders3/headless-gui@v1
with:
run: tox -e py39-linux-pyside2-examples
napari-0.5.0a1/.github/workflows/test_prereleases.yml
# An "early warning" cron job that will install dependencies
# with `pip install --pre` periodically to test for breakage
# (and open an issue if a test fails)
name: --pre Test
on:
schedule:
- cron: '0 */12 * * *' # every 12 hours
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
test:
name: ${{ matrix.platform }} py${{ matrix.python }} ${{ matrix.backend }} --pre
runs-on: ${{ matrix.platform }}
if: github.repository == 'napari/napari'
strategy:
fail-fast: false
matrix:
platform: [windows-latest, macos-latest, ubuntu-latest]
python: [3.9]
backend: [pyqt5, pyside2]
steps:
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python }}
cache-dependency-path: setup.cfg
- uses: tlambert03/setup-qt-libs@v1
- name: Install Windows OpenGL
if: runner.os == 'Windows'
run: |
git clone --depth 1 https://github.com/pyvista/gl-ci-helpers.git
powershell gl-ci-helpers/appveyor/install_opengl.ps1
if (Test-Path -Path "C:\Windows\system32\opengl32.dll" -PathType Leaf) {Exit 0} else {Exit 1}
- name: Install dependencies
run: |
pip install --upgrade pip
pip install setuptools tox tox-gh-actions
- name: Test with tox
# run tests using pip install --pre
uses: aganders3/headless-gui@v1
with:
run: python -m tox -v --pre
env:
PLATFORM: ${{ matrix.platform }}
BACKEND: ${{ matrix.backend }}
PYVISTA_OFF_SCREEN: True # required for opengl on windows
# If something goes wrong, we can open an issue in the repo
- name: Report Failures
if: ${{ failure() }}
uses: JasonEtco/create-an-issue@v2
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PLATFORM: ${{ matrix.platform }}
PYTHON: ${{ matrix.python }}
BACKEND: ${{ matrix.backend }}
RUN_ID: ${{ github.run_id }}
TITLE: '[test-bot] pip install --pre is failing'
with:
filename: .github/TEST_FAIL_TEMPLATE.md
update_existing: true
napari-0.5.0a1/.github/workflows/test_pull_requests.yml
# Our minimal suite of tests that run on each pull request
name: PR Test
on:
pull_request:
branches:
- main
- "v*x"
concurrency:
group: test-${{ github.ref }}
cancel-in-progress: true
jobs:
manifest:
# make sure all necessary files will be bundled in the release
name: Check Manifest
timeout-minutes: 15
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.x"
cache-dependency-path: setup.cfg
cache: 'pip'
- name: Install Dependencies
run: pip install --upgrade pip
- name: Install Napari dev
run: pip install -e .[build]
- name: Check Manifest
run: |
make typestubs
make check-manifest
localization_syntax:
# make sure localizable strings are not built with f-strings or str.format
name: Check localization syntax
runs-on: ubuntu-latest
timeout-minutes: 2
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.x"
- name: Check localization formatting
run: |
pip install --upgrade pip semgrep
# f"..." and f'...' are the same for semgrep
semgrep --error --lang python --pattern 'trans._(f"...")' napari
semgrep --error --lang python --pattern 'trans._($X.format(...))' napari
test:
name: ${{ matrix.platform }} ${{ matrix.python }} ${{ matrix.toxenv || matrix.backend }} ${{ matrix.MIN_REQ && 'min_req' }}
runs-on: ${{ matrix.platform }}
timeout-minutes: 40
strategy:
fail-fast: false
matrix:
platform: [ubuntu-latest]
python: ["3.8", "3.9", "3.10"]
backend: [pyqt5, pyside2]
include:
# Windows py38
- python: 3.8
platform: windows-latest
backend: pyqt5
- python: 3.8
platform: windows-latest
backend: pyside2
- python: 3.9
platform: macos-latest
backend: pyqt5
# minimum specified requirements
- python: 3.8
platform: ubuntu-18.04
backend: pyqt5
MIN_REQ: 1
# test with --async_only
- python: 3.8
platform: ubuntu-18.04
toxenv: async-pyqt5-py38-linux
# test without any Qt backends
- python: 3.8
platform: ubuntu-18.04
toxenv: headless-py38-linux
- python: 3.9
platform: ubuntu-latest
backend: pyqt6
- python: 3.9
platform: ubuntu-latest
backend: pyside6
- python: '3.10'
platform: ubuntu-latest
backend: pyside6
steps:
- name: Cancel Previous Runs
uses: styfle/cancel-workflow-action@0.11.0
with:
access_token: ${{ github.token }}
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python }}
cache: "pip"
cache-dependency-path: setup.cfg
- uses: tlambert03/setup-qt-libs@v1
# strategy borrowed from vispy for installing opengl libs on windows
- name: Install Windows OpenGL
if: runner.os == 'Windows'
run: |
git clone --depth 1 https://github.com/pyvista/gl-ci-helpers.git
powershell gl-ci-helpers/appveyor/install_opengl.ps1
if (Test-Path -Path "C:\Windows\system32\opengl32.dll" -PathType Leaf) {Exit 0} else {Exit 1}
# tox and tox-gh-actions will take care of the "actual" installation
# of python dependencies into a virtualenv. see tox.ini for more
- name: Install dependencies
run: |
pip install --upgrade pip
pip install setuptools tox tox-gh-actions
python tools/minreq.py
env:
# tools/minreq.py sets all deps to their minimum stated versions
# it is a no-op if MIN_REQ is not set
MIN_REQ: ${{ matrix.MIN_REQ }}
# here we pass off control of environment creation and running of tests to tox
# tox-gh-actions, installed above, helps to convert environment variables into
# tox "factors" ... limiting the scope of what gets tested on each platform
# for instance, on ubuntu-latest with python 3.8, it would be equivalent to this command:
# `tox -e py38-linux-pyqt,py38-linux-pyside`
# see tox.ini for more
- name: Test with tox
# the longest is macos-latest 3.9 pyqt5 at ~30 minutes.
timeout-minutes: 35
uses: aganders3/headless-gui@v1
with:
run: python -m tox
env:
PLATFORM: ${{ matrix.platform }}
BACKEND: ${{ matrix.backend }}
TOXENV: ${{ matrix.toxenv }}
NUMPY_EXPERIMENTAL_ARRAY_FUNCTION: ${{ matrix.MIN_REQ || 1 }}
PYVISTA_OFF_SCREEN: True
MIN_REQ: ${{ matrix.MIN_REQ }}
- uses: actions/upload-artifact@v3
with:
name: upload pytest timing reports as json
path: |
./report-*.json
- name: Coverage
uses: codecov/codecov-action@v3
test_pip_install:
name: ubuntu-latest 3.9 pip install
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
path: napari-from-github
- name: Set up Python 3.9
uses: actions/setup-python@v4
with:
python-version: 3.9
cache: "pip"
cache-dependency-path: napari-from-github/setup.cfg
- uses: tlambert03/setup-qt-libs@v1
- name: Install this commit
run: |
pip install --upgrade pip
pip install ./napari-from-github[all,testing]
- name: Test
uses: aganders3/headless-gui@v1
with:
run: |
python -m pytest --pyargs napari --color=yes
python -m pytest --pyargs napari_builtins --color=yes
test_examples:
name: test examples
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: 3.9
cache-dependency-path: setup.cfg
- uses: tlambert03/setup-qt-libs@v1
- name: Install this commit
run: |
pip install --upgrade pip
pip install setuptools tox tox-gh-actions
- name: Test
uses: aganders3/headless-gui@v1
with:
run: tox -e py39-linux-pyside2-examples
napari-0.5.0a1/.github/workflows/test_translations.yml
name: Test translations
on:
schedule:
# * is a special character in YAML so you have to quote this string
- cron: '0 1 * * *'
workflow_dispatch:
jobs:
translations:
name: Check missing translations
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.9
uses: actions/setup-python@v4
with:
python-version: 3.9
cache-dependency-path: setup.cfg
- name: Install napari
run: |
pip install -e .[all]
pip install -e .[testing]
- name: Run check
run: |
python -m pytest -Wignore tools/ --tb=short
- uses: JasonEtco/create-an-issue@v2
if: ${{ failure() }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
filename: .github/missing_translations.md
update_existing: true
napari-0.5.0a1/.github/workflows/test_typing.yml
name: Test typing
on:
pull_request:
branches:
- main
jobs:
typing:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: 3.9
cache-dependency-path: setup.cfg
- name: Install napari
run: |
pip install -r resources/requirements_mypy.txt
SETUPTOOLS_ENABLE_FEATURES="legacy-editable" pip install -e .[all]
- name: Run mypy on typed modules
run: make typecheck
napari-0.5.0a1/.github/workflows/test_vendored.yml
name: Test vendored
on:
schedule:
# * is a special character in YAML so you have to quote this string
- cron: '0 2 * * *'
jobs:
vendor:
name: Vendored
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.9
uses: actions/setup-python@v4
with:
python-version: 3.9
- name: Run check
id: check_v
run: python tools/check_vendored_modules.py --ci
- name: Create PR updating vendored modules
uses: peter-evans/create-pull-request@v4
with:
commit-message: Update vendored modules.
branch: update-vendored-examples
delete-branch: true
title: "[Automatic] Update ${{ steps.check_v.outputs.vendored }} vendored module"
body: |
This PR is automatically created and updated by a napari GitHub Actions
cron job to keep vendored modules up to date.
It looks like ${{ steps.check_v.outputs.vendored }} has a new version.
napari-0.5.0a1/.gitignore
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
.dmypy.json
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
pip-wheel-metadata/
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/source/api/
docs/source/release/
docs/source/releases.rst
docs/build/
docs/_tags
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
# Pycharm files
.idea
# Liclipse
.project
.pydevproject
.settings/
# OS stuff
.DS_store
# Benchmarking results
.asv/
# VSCode
.vscode/
# emacs
*~
\#*\#
auto-save-list
tramp
.\#*
*_flymake.*
.projectile
.dir-locals.el
# these will get autogenerated
_qt_resources*.py
res.qrc
# ignore all generated themed svgs
napari/resources/themes
# briefcase
macOS/
linux/
windows/
napari/_version.py
docs/api/napari*
docs/_build
# built in setup.py
napari/view_layers.pyi
napari/components/viewer_model.pyi
# Autogenerated documentation
docs/images/_autogenerated/
docs/guides/preferences.md
docs/guides/_layer_events.md
docs/guides/_viewer_events.md
docs/guides/_layerlist_events.md
# come from npe2 docs
docs/plugins/_npe2_*.md
napari/settings/napari.schema.json
docs/jupyter_execute/
docs/.jupyter_cache/
docs/gallery/
# pytest reports in json format https://github.com/napari/napari/pull/4518
report*.json
napari/resources/icons/_themes/
# perfmon
tools/perfmon/*/traces-*.json
github_cache.sqlite
napari-0.5.0a1/.pre-commit-config.yaml
repos:
- repo: https://github.com/MarcoGorelli/absolufy-imports
rev: v0.3.1
hooks:
- id: absolufy-imports
exclude: _vendor|vendored|examples
- repo: https://github.com/hadialqattan/pycln
rev: v2.1.3
hooks:
- id: pycln
- repo: https://github.com/psf/black
rev: 22.12.0
hooks:
- id: black
pass_filenames: true
exclude: _vendor|vendored|examples
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.237
hooks:
- id: ruff
exclude: _vendor|vendored
- repo: https://github.com/seddonym/import-linter
rev: v1.7.0
hooks:
- id: import-linter
stages: [manual]
- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.21.0
hooks:
- id: check-github-workflows
napari-0.5.0a1/CITATION.cff
# YAML 1.2
# Metadata for citation of this software according to the CFF format (https://citation-file-format.github.io/)
cff-version: 1.0.3
message: If you use this software, please cite it using these metadata.
title: 'napari: a multi-dimensional image viewer for Python'
doi: 10.5281/zenodo.3555620
authors:
- given-names: Nicholas
family-names: Sofroniew
affiliation: Chan Zuckerberg Initiative
orcid: "https://orcid.org/0000-0002-3426-0914"
- given-names: Talley
family-names: Lambert
affiliation: Harvard Medical School
orcid: "https://orcid.org/0000-0002-2409-0181"
- given-names: Kira
family-names: Evans
affiliation: Chan Zuckerberg Initiative
- given-names: Juan
family-names: Nunez-Iglesias
affiliation: Biomedicine Discovery Institute, Monash University
- given-names: Grzegorz
family-names: Bokota
affiliation: University of Warsaw, Faculty of Mathematics, Informatics, and Mechanics
orcid: https://orcid.org/0000-0002-5470-1676
- given-names: Philip
family-names: Winston
affiliation: Tobeva Software
- given-names: Gonzalo
family-names: Peña-Castellanos
affiliation: Quansight, Inc. / SIHSA Ltda
- given-names: Kevin
family-names: Yamauchi
affiliation: Iber Lab - ETH Zürich
- given-names: Matthias
family-names: Bussonnier
orcid: "http://orcid.org/0000-0002-7636-8632"
affiliation: Quansight Labs
- given-names: Draga
family-names: Doncila Pop
- given-names: Ahmet
family-names: Can Solak
affiliation: Chan Zuckerberg Biohub
- given-names: Ziyang
family-names: Liu
affiliation: Chan Zuckerberg Initiative Foundation
- given-names: Pam
family-names: Wadhwa
affiliation: Quansight Labs
- given-names: Alister
family-names: Burt
affiliation: MRC-LMB
- given-names: Genevieve
family-names: Buckley
affiliation: Monash University
orcid: https://orcid.org/0000-0003-2763-492X
- given-names: Andrew
family-names: Sweet
affiliation: Chan Zuckerberg Initiative
- given-names: Lukasz
family-names: Migas
affiliation: Delft University of Technology
- given-names: Volker
family-names: Hilsenstein
affiliation: EMBL Heidelberg, Germany
orcid: https://orcid.org/0000-0002-2255-2960
- given-names: Lorenzo
family-names: Gaifas
affiliation: Gutsche Lab - University of Grenoble
orcid: https://orcid.org/0000-0003-4875-9422
- given-names: Jordão
family-names: Bragantini
affiliation: Chan Zuckerberg Biohub
- given-names: Jaime
family-names: Rodríguez-Guerra
affiliation: Quansight Labs
- given-names: Hector
family-names: Muñoz
affiliation: University of California, Los Angeles
orcid: https://orcid.org/0000-0001-7851-2549
- given-names: Jeremy
family-names: Freeman
affiliation: Chan Zuckerberg Initiative
- given-names: Peter
family-names: Boone
- given-names: Alan
family-names: Lowe
name-particle: R
affiliation: UCL & The Alan Turing Institute
- given-names: Christoph
family-names: Gohlke
affiliation: University of California, Irvine
- given-names: Loic
family-names: Royer
affiliation: Chan Zuckerberg Biohub
- given-names: Andrea
family-names: Pierré
affiliation: Brown University
orcid: "https://orcid.org/0000-0003-4501-5428"
- given-names: Hagai
family-names: Har-Gil
affiliation: Tel Aviv University, Israel
- given-names: Abigail
family-names: McGovern
affiliation: Monash University
repository-code: https://github.com/napari/napari
license: BSD-3-Clause
napari-0.5.0a1/EULA.md
## Notice of Third Party Software Licenses
napari may be [installed][napari_installers] through a variety of methods. In particular, the
bundled installers may include third-party software packages or tools licensed under different
terms. These licenses may be accessed from within the resulting napari installation or at
https://napari.org.
[napari_installers]: https://napari.org/stable/#installation
Intel® OpenMP
```
Intel Simplified Software License (Version August 2021)
Use and Redistribution. You may use and redistribute the software (the
"Software"), without modification, provided the following conditions are met:
* Redistributions must reproduce the above copyright notice and the following
terms of use in the Software and in the documentation and/or other materials
provided with the distribution.
* Neither the name of Intel nor the names of its suppliers may be used to
endorse or promote products derived from this Software without specific
prior written permission.
* No reverse engineering, decompilation, or disassembly of this Software is
permitted.
No other licenses. Except as provided in the preceding section, Intel grants no
licenses or other rights by implication, estoppel or otherwise to, patent,
copyright, trademark, trade name, service mark or other intellectual property
licenses or rights of Intel.
Third party software. The Software may contain Third Party Software. "Third
Party Software" is open source software, third party software, or other Intel
software that may be identified in the Software itself or in the files (if any)
listed in the "third-party-software.txt" or similarly named text file included
with the Software. Third Party Software, even if included with the distribution
of the Software, may be governed by separate license terms, including without
limitation, open source software license terms, third party software license
terms, and other Intel software license terms. Those separate license terms
solely govern your use of the Third Party Software, and nothing in this license
limits any rights under, or grants rights that supersede, the terms of the
applicable license terms.
DISCLAIMER. THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND NON-INFRINGEMENT ARE
DISCLAIMED. THIS SOFTWARE IS NOT INTENDED FOR USE IN SYSTEMS OR APPLICATIONS
WHERE FAILURE OF THE SOFTWARE MAY CAUSE PERSONAL INJURY OR DEATH AND YOU AGREE
THAT YOU ARE FULLY RESPONSIBLE FOR ANY CLAIMS, COSTS, DAMAGES, EXPENSES, AND
ATTORNEYS' FEES ARISING OUT OF ANY SUCH USE, EVEN IF ANY CLAIM ALLEGES THAT
INTEL WAS NEGLIGENT REGARDING THE DESIGN OR MANUFACTURE OF THE SOFTWARE.
LIMITATION OF LIABILITY. IN NO EVENT WILL INTEL BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
No support. Intel may make changes to the Software, at any time without notice,
and is not obligated to support, update or provide training for the Software.
Termination. Your right to use the Software is terminated in the event of your
breach of this license.
Feedback. Should you provide Intel with comments, modifications, corrections,
enhancements or other input ("Feedback") related to the Software, Intel will be
free to use, disclose, reproduce, license or otherwise distribute or exploit the
Feedback in its sole discretion without any obligations or restrictions of any
kind, including without limitation, intellectual property rights or licensing
obligations.
Compliance with laws. You agree to comply with all relevant laws and regulations
governing your use, transfer, import or export (or prohibition thereof) of the
Software.
Governing law. All disputes will be governed by the laws of the United States of
America and the State of Delaware without reference to conflict of law
principles and subject to the exclusive jurisdiction of the state or federal
courts sitting in the State of Delaware, and each party agrees that it submits
to the personal jurisdiction and venue of those courts and waives any
objections. The United Nations Convention on Contracts for the International
Sale of Goods (1980) is specifically excluded and will not apply to the
Software.
```
Intel® Math Kernel Library
```
Intel End User License Agreement for Developer Tools (Version October 2021)
IMPORTANT NOTICE - PLEASE READ AND AGREE BEFORE DOWNLOADING, INSTALLING, COPYING
OR USING
This Agreement is between you, or the company or other legal entity that you
represent and warrant you have the legal authority to bind, (each, "You" or
"Your") and Intel Corporation and its subsidiaries (collectively, "Intel")
regarding Your use of the Materials. By downloading, installing, copying or
using the Materials, You agree to be bound by the terms of this Agreement. If
You do not agree to the terms of this Agreement, or do not have legal authority
or required age to agree to them, do not download, install, copy or use the
Materials.
1. LICENSE DEFINITIONS.
A. "Cloud Provider" means a third party service provider offering a cloud-based
platform, infrastructure, application or storage services, such as Microsoft
Azure or Amazon Web Services, which You may only utilize to host the
Materials subject to the restrictions set forth in Section 2.3 B.
B. "Derivative Work" means a derivative work, as defined in 17 U.S.C. 101, of
the Source Code.
C. "Executable Code" means computer programming code in binary form suitable
for machine execution by a processor without the intervening steps of
interpretation or compilation.
D. "Materials" mean the software, documentation, the software product serial
number, and other collateral, including any updates, made available to You
by Intel under this Agreement. Materials include Redistributables,
Executable Code, Source Code, Sample Source Code, and Pre-Release Materials,
but do not include Third Party Software.
E. "Pre-Release Materials" mean the Materials, or portions of the Materials,
that are identified (in the product release notes, on Intel's download
website for the Materials or elsewhere) or labeled as pre-release,
prototype, alpha or beta code and, as such, are deemed to be pre-release
code (i) which may not be fully functional or tested and may contain bugs or
errors; (ii) which Intel may substantially modify in its development of a
production version; or (iii) for which Intel makes no assurances that it
will ever develop or make a production version generally available.
Pre-Release Materials are subject to the terms of Section 3.2.
F. "Reciprocal Open Source Software" means any software that is subject to a
license which requires that (i) it must be distributed in source code form;
(ii) it must be licensed under the same open source license terms; and (iii)
its derivative works must be licensed under the same open source license
terms. Examples of this type of license are the GNU General Public License
or the Mozilla Public License.
G. "Redistributables" mean the files (if any) listed in the "redist.txt,"
"redist-rt.txt" or similarly-named text files that may be included in the
Materials. Redistributables include Sample Source Code.
H. "Sample Source Code" means those portions of the Materials that are Source
Code and are identified as sample code. Sample Source Code may not have been
tested or validated by Intel and is provided purely as a programming example.
I. "Source Code" means the software portion of the Materials provided in human
readable format.
J. "Third Party Software" mean the files (if any) listed in the
"third-party-software.txt" or other similarly-named text file that may be
included in the Materials for the applicable software. Third Party Software
is subject to the terms of Section 2.2.
K. "Your Product" means one or more applications, products or projects
developed by or for You using the Materials.
2. LICENSE GRANTS.
2.1 License to the Materials. Subject to the terms and conditions of this
Agreement, Intel grants You a non-exclusive, worldwide, non-assignable,
non-sublicensable, limited right and license under its copyrights, to:
A. reproduce internally a reasonable number of copies of the Materials for Your
personal or business use;
B. use the Materials solely for Your personal or business use to develop Your
Product, in accordance with the documentation included as part of the
Materials;
C. modify or create Derivative Works only of the Redistributables, or any
portions, that are provided to You in Source Code;
D. distribute (directly and through Your distributors, resellers, and other
channel partners, if applicable), the Redistributables, including any
modifications to or Derivative Works of the Redistributables or any portions
made pursuant to Section 2.1.C subject to the following conditions:
(1) Any distribution of the Redistributables must only be as part of Your
Product which must add significant primary functionality different than
that of the Redistributables themselves;
(2) You must only distribute the Redistributables originally provided to You
by Intel only in Executable Code subject to a license agreement that
prohibits reverse engineering, decompiling or disassembling the
Redistributables;
(3) This distribution right includes a limited right to sublicense only the
Intel copyrights in the Redistributables and only to the extent necessary
to perform, display, and distribute the Redistributables (including Your
modifications and Derivative Works of the Redistributables provided in
Source Code) solely as incorporated in Your Product; and
(4) You: (i) will be solely responsible to Your customers for any update,
support obligation or other obligation or liability which may arise from
the distribution of Your Product, (ii) will not make any statement that
Your Product is "certified" or that its performance is guaranteed by Intel
or its suppliers, (iii) will not use Intel's or its suppliers' names or
trademarks to market Your Product, (iv) will comply with any additional
restrictions which are included in the text files with the
Redistributables and in Section 3 below, (v) will indemnify, hold
harmless, and defend Intel and its suppliers from and against any claims
or lawsuits, costs, damages, and expenses, including attorney's fees, that
arise or result from (a) Your modifications or Derivative Works of the
Materials or (b) Your distribution of Your Product.
2.2 Third Party Software. Third Party Software, even if included with the
distribution of the Materials, may be governed by separate license terms,
including without limitation, third party license terms, open source
software notices and terms, and/or other Intel software license terms. These
separate license terms solely govern Your use of the Third Party Software.
2.3 Third Party Use.
A. If You are an entity, Your contractors may use the Materials under the
license specified in Section 2, provided: (i) their use of the Materials is
solely on behalf of and in support of Your business, (ii) they agree to the
terms and conditions of this Agreement, and (iii) You are solely responsible
for their use, misuse or disclosure of the Materials.
B. You may utilize a Cloud Provider to host the Materials for You, provided:
(i) the Cloud Provider may only host the Materials for Your exclusive use
and may not use the Materials for any other purpose whatsoever, including the
restriction set forth in Section 3.1(xi); (ii) the Cloud Provider's use of
the Materials must be solely on behalf of and in support of Your Product, and
(iii) You will indemnify, hold harmless, and defend Intel and its suppliers
from and against any claims or lawsuits, costs, damages, and expenses,
including attorney's fees, that arise or result from Your Cloud Provider's
use, misuse or disclosure of the Materials.
3. LICENSE CONDITIONS.
3.1 Restrictions. Except as expressly provided in this Agreement, You may NOT:
(i) use, reproduce, disclose, distribute, or publicly display the
Materials; (ii) share, publish, rent or lease the Materials to any third
party; (iii) assign this Agreement or transfer the Materials; (iv) modify,
adapt, or translate the Materials in whole or in part; (v) reverse engineer,
decompile, or disassemble the Materials, or otherwise attempt to derive the
source code for the software; (vi) work around any technical limitations in
the Materials; (vii) distribute, sublicense or transfer any Source Code,
modifications or Derivative Works of any Source Code to any third party;
(viii) remove, minimize, block or modify any notices of Intel or its
suppliers in the Materials; (ix) include the Redistributables in malicious,
deceptive, or unlawful programs or products or use the Materials in any way
that is against the law; (x) modify, create a Derivative Work, link, or
distribute the Materials so that any part of it becomes Reciprocal Open
Source Software; (xi) use the Materials directly or indirectly for SaaS
services or service bureau purposes (i.e., a service that allows use of or
access to the Materials by a third party as part of that service, such as
the salesforce.com service business model).
3.2 Pre-Release Materials. If You receive Pre-Release Materials, You may
reproduce a reasonable number of copies and use the Pre-Release Materials
for evaluation and testing purposes only. You may not (i) modify or
incorporate the Pre-Release Materials into Your Product; (ii) continue to
use the Pre-Release Materials once a commercial version is released; or
(iii) disclose to any third party any benchmarks, performance results, or
other information relating to the Pre-Release Materials. Intel may waive
these restrictions in writing at its sole discretion; however, if You decide
to use the Pre-Release Materials in Your Product (even with Intel's waiver),
You acknowledge and agree that You are fully responsible for any and all
issues that result from such use.
3.3 Safety-Critical, and Life-Saving Applications; Indemnity. The Materials may
provide information relevant to safety-critical applications
("Safety-Critical Applications") to allow compliance with functional safety
standards or requirements. You acknowledge and agree that safety is Your
responsibility. To the extent You use the Materials to create, or as part
of, products used in Safety-Critical Applications, it is Your responsibility
to design, manage, and ensure that there are system-level safeguards to
anticipate, monitor, and control system failures, and You agree that You are
solely responsible for all applicable regulatory standards and
safety-related requirements concerning Your use of the Materials in Safety
Critical Applications.
Should You use the Materials for Safety-Critical Applications or in any type
of a system or application in which the failure of the Materials could
create a situation where personal injury or death may occur (e.g., medical
systems, life-sustaining or life-saving systems) ("Life-Saving
Applications"), You agree to indemnify, defend, and hold Intel and its
representatives harmless against any claims or lawsuits, costs, damages, and
expenses, including reasonable attorney fees, arising in any way out of Your
use of the Materials in Safety-Critical Applications or Life-Saving
Applications and claims of product liability, personal injury or death
associated with those applications; even if such claims allege that Intel
was negligent or strictly liable regarding the design or manufacture of the
Materials or its failure to warn regarding the Materials.
3.4 Media Format Codecs and Digital Rights Management. You acknowledge and agree
that Your use of the Materials or distribution of the Redistributables with
Your Product as permitted by this Agreement may require You to procure
license(s) from third parties that may hold intellectual property rights
applicable to any media decoding, encoding or transcoding technology (e.g.,
the use of an audio or video codec) and/or digital rights management
capabilities of the Materials, if any. Should any such additional licenses
be required, You are solely responsible for obtaining any such licenses and
agree to obtain any such licenses at Your own expense.
4. DATA COLLECTION AND PRIVACY.
4.1 Data Collection. The Materials may generate and collect anonymous data
and/or provisioning data about the Materials and/or the development
environment and transmit the data to Intel as a one-time event during
installation. Optional data may also be collected by the Materials, however,
You will be provided notice of the request to collect optional data and no
optional data will be collected without Your consent. All data collection by
Intel is performed pursuant to relevant privacy laws, including notice and
consent requirements.
4.2 Intel's Privacy Notice. Intel is committed to respecting Your privacy. To
learn more about Intel's privacy practices, please visit
http://www.intel.com/privacy.
5. OWNERSHIP. Title to the Materials and all copies remain with Intel or its
suppliers. The Materials are protected by intellectual property rights,
including without limitation, United States copyright laws and international
treaty provisions. You will not remove any copyright or other proprietary
notices from the Materials. Except as expressly provided herein, no license
or right is granted to You directly or by implication, inducement, estoppel
or otherwise; specifically, Intel does not grant any express or implied right
to You under Intel patents, copyrights, trademarks, or trade secrets.
6. NO WARRANTY AND NO SUPPORT.
6.1 No Warranty. Disclaimer. Intel disclaims all warranties of any kind and the
terms and remedies provided in this Agreement are instead of any other
warranty or condition, express, implied or statutory, including those
regarding merchantability, fitness for any particular purpose,
non-infringement or any warranty arising out of any course of dealing, usage
of trade, proposal, specification or sample. Intel does not assume (and does
not authorize any person to assume on its behalf) any liability.
6.2 No Support; Priority Support. Intel may make changes to the Materials, or to
items referenced therein, at any time without notice, but is not obligated
to support, update or provide training for the Materials under the terms of
this Agreement. Intel offers free community and paid priority support
options. More information on these support options can be found at:
https://software.intel.com/content/www/us/en/develop/support/priority-support.html.
7. LIMITATION OF LIABILITY.
7.1 Intel will not be liable for any of the following losses or damages (whether
such losses or damages were foreseen, foreseeable, known or otherwise): (i)
loss of revenue; (ii) loss of actual or anticipated profits; (iii) loss of
the use of money; (iv) loss of anticipated savings; (v) loss of business;
(vi) loss of opportunity; (vii) loss of goodwill; (viii) loss of use of the
Materials; (ix) loss of reputation; (x) loss of, damage to, or corruption of
data; or (xi) any indirect, incidental, special or consequential loss of
damage however caused (including loss or damage of the type specified in
this Section 7).
7.2 Intel's total cumulative liability to You, including for direct damages for
claims relating to this Agreement, and whether for breach of contract,
negligence, or for any other reason, will not exceed $100.
7.3 You acknowledge that the limitations of liability provided in this Section 7
are an essential part of this Agreement. You agree that the limitations of
liability provided in this Agreement with respect to Intel will be conveyed
to and made binding upon any customer of Yours that acquires the
Redistributables.
8. USER SUBMISSIONS. Should you provide Intel with comments, modifications,
corrections, enhancements or other input ("Feedback") related to the
Materials, Intel will be free to use, disclose, reproduce, license or
otherwise distribute or exploit the Feedback in its sole discretion without
any obligations or restrictions of any kind, including without limitation,
intellectual property rights or licensing obligations. If You wish to provide
Intel with information that You intend to be treated as confidential
information, Intel requires that such confidential information be provided
pursuant to a non-disclosure agreement ("NDA"); please contact Your Intel
representative to ensure the proper NDA is in place.
9. NON-DISCLOSURE. Information provided by Intel to You may include information
marked as confidential. You must treat such information as confidential under
the terms of the applicable NDA between Intel and You. If You have not
entered into an NDA with Intel, You must not disclose, distribute or make use
of any information marked as confidential, except as expressly authorized in
writing by Intel. Intel retains all rights in and to its confidential
information specifications, designs, engineering details, discoveries,
inventions, patents, copyrights, trademarks, trade secrets, and other
proprietary rights relating to the Materials. Any breach by You of the
confidentiality obligations provided for in this Section 9 will cause
irreparable injury to Intel for which money damages may be inadequate to
compensate Intel for losses arising from such a breach. Intel may obtain
equitable relief, including injunctive relief, if You breach or threaten to
breach Your confidentiality obligations.
10. TERM AND TERMINATION. This Agreement becomes effective on the date You
accept this Agreement and will continue until terminated as provided for in
this Agreement. The term for any Pre-Release Materials terminates upon
release of a commercial version. This Agreement will terminate if You are in
breach of any of its terms and conditions. Upon termination, You will
promptly destroy the Materials and all copies. In the event of termination of
this Agreement, Your license to any Redistributables distributed by You in
accordance with the terms and conditions of this Agreement, prior to the
effective date of such termination, will survive any such termination of this
Agreement. Sections 1, 2.1.D(4)(v), 2.2, 2.3.A(iii), 2.3.B(iii), 3.3, 5, 6,
7, 8, 9, 10 (with respect to these survival provisions in the last sentence),
and 12 will survive expiration or termination of this Agreement.
11. U.S. GOVERNMENT RESTRICTED RIGHTS. The technical data and computer software
covered by this license is a "Commercial Item," as such term is defined by
the FAR 2.101 (48 C.F.R. 2.101) and is "commercial computer software" and
"commercial computer software documentation" as specified under FAR 12.212
(48 C.F.R. 12.212) or DFARS 227.7202 (48 C.F.R. 227.7202), as applicable.
This commercial computer software and related documentation is provided to
end users for use by and on behalf of the U.S. Government with only those
rights as are granted to all other end users pursuant to the terms and
conditions of this Agreement.
12. GENERAL PROVISIONS.
12.1 ENTIRE AGREEMENT. This Agreement contains the complete and exclusive
agreement and understanding between the parties concerning the subject
matter of this Agreement, and supersedes all prior and contemporaneous
proposals, agreements, understanding, negotiations, representations,
warranties, conditions, and communications, oral or written, between the
parties relating to the same subject matter. Each party acknowledges and
agrees that in entering into this Agreement it has not relied on, and will
not be entitled to rely on, any oral or written representations,
warranties, conditions, understanding, or communications between the
parties that are not expressly set forth in this Agreement. The express
provisions of this Agreement control over any course of performance, course
of dealing, or usage of the trade inconsistent with any of the provisions
of this Agreement. The provisions of this Agreement will prevail
notwithstanding any different, conflicting, or additional provisions that
may appear on any purchase order, acknowledgement, invoice, or other
writing issued by either party in connection with this Agreement. No
modification or amendment to this Agreement will be effective unless in
writing and signed by authorized representatives of each party, and must
specifically identify this Agreement by its title and version (e.g., "Intel
End User License Agreement for Developer Tools (Version October 2021)");
except that Intel may make changes to this Agreement as it distributes new
versions of the Materials. When changes are made, Intel will make a new
version of the Agreement available on its website. If You received a copy
of this Agreement translated into another language, the English language
version of this Agreement will prevail in the event of any conflict between
versions.
12.2 EXPORT. You acknowledge that the Materials and all related technical
information are subject to export controls and you agree to comply with all
laws and regulations of the United States and other applicable governments
governing export, re-export, import, transfer, distribution, and use of the
Materials. In particular, but without limitation, the Materials may not be
exported or re-exported (i) into any U.S. embargoed countries or (ii) to
any person or entity listed on a denial order published by the U.S.
government or any other applicable governments. By using the Materials, You
represent and warrant that You are not located in any such country or on
any such list. You also agree that You will not use the Materials for, or
sell or transfer them to a third party who is known or suspected to be
involved in, any purposes prohibited by the U.S. government or other
applicable governments, including, without limitation, the development,
design, manufacture, or production of nuclear, missile, chemical or
biological weapons.
12.3 GOVERNING LAW, JURISDICTION, AND VENUE. All disputes arising out of or
related to this Agreement, whether based on contract, tort, or any other
legal or equitable theory, will in all respects be governed by, and
construed and interpreted under, the laws of the United States of America
and the State of Delaware, without reference to conflict of laws
principles. The parties agree that the United Nations Convention on
Contracts for the International Sale of Goods (1980) is specifically
excluded from and will not apply to this Agreement. All disputes arising
out of or related to this Agreement, whether based on contract, tort, or
any other legal or equitable theory, will be subject to the exclusive
jurisdiction of the courts of the State of Delaware or of the Federal
courts sitting in that State. Each party submits to the personal
jurisdiction of those courts and waives all objections to that jurisdiction
and venue for those disputes.
12.4 SEVERABILITY. The parties intend that if a court holds that any provision
or part of this Agreement is invalid or unenforceable under applicable law,
the court will modify the provision to the minimum extent necessary to make
it valid and enforceable, or if it cannot be made valid and enforceable,
the parties intend that the court will sever and delete the provision or
part from this Agreement. Any change to or deletion of a provision or part
of this Agreement under this Section will not affect the validity or
enforceability of the remainder of this Agreement, which will continue in
full force and effect.
```
UCRT (Redistributable files for Windows SDK)
```
MICROSOFT SOFTWARE LICENSE TERMS
MICROSOFT WINDOWS SOFTWARE DEVELOPMENT KIT (SDK) FOR WINDOWS 10
_______________________________________________________________________________________________________
These license terms are an agreement between Microsoft Corporation (or based on where you live, one of its affiliates) and you. Please read them. They apply
to the software named above, which includes the media on which you received it, if any. The terms also apply to any Microsoft
• APIs (i.e., APIs included with the installation of the SDK or APIs accessed by installing extension packages or service to use with the SDK),
• updates,
• supplements,
• internet-based services, and
• support services
for this software, unless other terms accompany those items. If so, those terms apply.
By using the software, you accept these terms. If you do not accept them, do not use the software.
As described below, using some features also operates as your consent to the transmission of certain standard computer information for Internet-based
services.
________________________________________________________________________________________________
If you comply with these license terms, you have the rights below.
1. INSTALLATION AND USE RIGHTS.
a. You may install and use any number of copies of the software on your devices to design, develop and test your programs that run on a Microsoft
operating system. Further, you may install, use and/or deploy via a network management system or as part of a desktop image, any number of copies of the
software on computer devices within your internal corporate network to design, develop and test your programs that run on a Microsoft operating system. Each
copy must be complete, including all copyright and trademark notices. You must require end users to agree to terms that protect the software as much as these
license terms.
b. Utilities. The software contains certain components that are identified in the Utilities List located at
http://go.microsoft.com/fwlink/?LinkId=524839. Depending on the specific edition of the software, the number of Utility files you receive with the software
may not be equal to the number of Utilities listed in the Utilities List. Except as otherwise provided on the Utilities List for specific files, you may
copy and install the Utilities you receive with the software on to other third party machines. These Utilities may only be used to debug and deploy your
programs and databases you have developed with the software. You must delete all the Utilities installed onto a third party machine within the earlier of (i)
when you have finished debugging or deploying your programs; or (ii) thirty (30) days after installation of the Utilities onto that machine. We may add
additional files to this list from time to time.
c. Build Services and Enterprise Build Servers. You may install and use any number of copies of the software onto your build machines or servers,
solely for the purpose of:
i. Compiling, building, verifying and archiving your programs;
ii. Creating and configuring build systems internal to your organization to support your internal build environment; or
iii. Enabling a service for third parties to design, develop and test programs or services that run on a Microsoft operating system.
d. Included Microsoft Programs. The software contains other Microsoft programs. The license terms with those programs apply to your use of them.
e. Third Party Notices. The software may include third party code that Microsoft, not the third party, licenses to you under this agreement. Notices,
if any, for the third party code are included for your information only. Notices, if any, for this third party code are included with the software and may be
located at http://aka.ms/thirdpartynotices.
f.
2. ADDITIONAL LICENSING REQUIREMENTS AND/OR USE RIGHTS.
a. Distributable Code. The software contains code that you are permitted to distribute in programs you develop if you comply with the terms below.
i. Right to Use and Distribute. The code and test files listed below are “Distributable Code”.
• REDIST.TXT Files. You may copy and distribute the object code form of code listed in REDIST.TXT files plus the files listed on the
REDIST.TXT list located at http://go.microsoft.com/fwlink/?LinkId=524842. Depending on the specific edition of the software, the number of REDIST files you
receive with the software may not be equal to the number of REDIST files listed in the REDIST.TXT List. We may add additional files to the list from time to
time.
• Third Party Distribution. You may permit distributors of your programs to copy and distribute the Distributable Code as part of those
programs.
ii. Distribution Requirements. For any Distributable Code you distribute, you must
• Add significant primary functionality to it in your programs;
• For any Distributable Code having a filename extension of .lib, distribute only the results of running such Distributable Code through a
linker with your program;
• Distribute Distributable Code included in a setup program only as part of that setup program without modification;
• Require distributors and external end users to agree to terms that protect it at least as much as this agreement;
• For Distributable Code from the Windows Performance Toolkit portions of the software, distribute the unmodified software package as a whole
with your programs, with the exception of the KernelTraceControl.dll and the WindowsPerformanceRecorderControl.dll which can be distributed with your
programs;
• Display your valid copyright notice on your programs; and
• Indemnify, defend, and hold harmless Microsoft from any claims, including attorneys’ fees, related to the distribution or use of your
programs.
iii. Distribution Restrictions. You may not
• Alter any copyright, trademark or patent notice in the Distributable Code;
• Use Microsoft’s trademarks in your programs’ names or in a way that suggests your programs come from or are endorsed by Microsoft;
• Distribute partial copies of the Windows Performance Toolkit portion of the software package with the exception of the
KernelTraceControl.dll and the WindowsPerformanceRecorderControl.dll which can be distributed with your programs;
• Distribute Distributable Code to run on a platform other than the Microsoft operating system platform;
• Include Distributable Code in malicious, deceptive or unlawful programs; or
• Modify or distribute the source code of any Distributable Code so that any part of it becomes subject to an Excluded License. An Excluded
License is one that requires, as a condition of use, modification or distribution, that
▪ The code be disclosed or distributed in source code form; or
▪ Others have the right to modify it.
b. Additional Rights and Restrictions for Features made Available with the Software.
i. Windows App Requirements. If you intend to make your program available in the Windows Store, the program must comply with the Certification
Requirements as defined and described in the App Developer Agreement, currently available at:
https://msdn.microsoft.com/en-us/library/windows/apps/hh694058.aspx.
ii. Bing Maps. The software may include features that retrieve content such as maps, images and other data through the Bing Maps (or successor
branded) application programming interface (the “Bing Maps API”) to create reports displaying data on top of maps, aerial and hybrid imagery. If these
features are included, you may use these features to create and view dynamic or static documents only in conjunction with and through methods and means of
access integrated in the software. You may not otherwise copy, store, archive, or create a database of the entity information including business names,
addresses and geocodes available through the Bing Maps API. You may not use the Bing Maps API to provide sensor based guidance/routing, nor use any Road
Traffic Data or Bird’s Eye Imagery (or associated metadata) even if available through the Bing Maps API for any purpose. Your use of the Bing Maps API and
associated content is also subject to the additional terms and conditions at http://go.microsoft.com/fwlink/?LinkId=21969.
iii. Additional Mapping APIs. The software may include application programming interfaces that provide maps and other related mapping features and
services that are not provided by Bing (the “Additional Mapping APIs”). These Additional Mapping APIs are subject to additional terms and conditions and may
require payment of fees to Microsoft and/or third party providers based on the use or volume of use of such Additional Mapping APIs. These terms and
conditions will be provided when you obtain any necessary license keys to use such Additional Mapping APIs or when you review or receive documentation related
to the use of such Additional Mapping APIs.
iv. Push Notifications. The Microsoft Push Notification Service may not be used to send notifications that are mission critical or otherwise could
affect matters of life or death, including without limitation critical notifications related to a medical device or condition. MICROSOFT EXPRESSLY DISCLAIMS
ANY WARRANTIES THAT THE USE OF THE MICROSOFT PUSH NOTIFICATION SERVICE OR DELIVERY OF MICROSOFT PUSH NOTIFICATION SERVICE NOTIFICATIONS WILL BE UNINTERRUPTED,
ERROR FREE, OR OTHERWISE GUARANTEED TO OCCUR ON A REAL-TIME BASIS.
v. Speech namespace API. Using speech recognition functionality via the Speech namespace APIs in a program requires the support of a speech
recognition service. The service may require network connectivity at the time of recognition (e.g., when using a predefined grammar). In addition, the service
may also collect speech-related data in order to provide and improve the service. The speech-related data may include, for example, information related to
grammar size and string phrases in a grammar.
vi. Also, in order for a user to use speech recognition on the phone they must first accept certain terms of use. The terms of use notify the
user that data related to their use of the speech recognition service will be collected and used to provide and improve the service. If a user does not accept
the terms of use and speech recognition is attempted by the application, the operation will not work and an error will be returned to the application.
vii. PlayReady Support. The software may include the Windows Emulator, which contains Microsoft’s PlayReady content access technology. Content
owners use Microsoft PlayReady content access technology to protect their intellectual property, including copyrighted content. This software uses PlayReady
technology to access PlayReady-protected content and/or WMDRM-protected content. Microsoft may decide to revoke the software’s ability to consume
PlayReady-protected content for reasons including but not limited to (i) if a breach or potential breach of PlayReady technology occurs, (ii) proactive
robustness enhancement, and (iii) if Content owners require the revocation because the software fails to properly enforce restrictions on content usage.
Revocation should not affect unprotected content or content protected by other content access technologies. Content owners may require you to upgrade
PlayReady to access their content. If you decline an upgrade, you will not be able to access content that requires the upgrade and may not be able to install
other operating system updates or upgrades.
viii. Package Managers. The software may include package managers, like NuGet, that give you the option to download other Microsoft and third
party software packages to use with your application. Those packages are under their own licenses, and not this agreement. Microsoft does not distribute,
license or provide any warranties for any of the third party packages.
ix. Font Components. While the software is running, you may use its fonts to display and print content. You may only embed fonts in content as
permitted by the embedding restrictions in the fonts; and temporarily download them to a printer or other output device to help print content.
x. Notice about the H.264/AVC Visual Standard, and the VC-1 Video Standard. This software may include H.264/MPEG-4 AVC and/or VC-1 decoding
technology. MPEG LA, L.L.C. requires this notice:
c. THIS PRODUCT IS LICENSED UNDER THE AVC AND THE VC-1 PATENT PORTFOLIO LICENSES FOR THE PERSONAL AND NON-COMMERCIAL USE OF A CONSUMER TO (i) ENCODE
VIDEO IN COMPLIANCE WITH THE ABOVE STANDARDS (“VIDEO STANDARDS”) AND/OR (ii) DECODE AVC, AND VC-1 VIDEO THAT WAS ENCODED BY A CONSUMER ENGAGED IN A PERSONAL
AND NON-COMMERCIAL ACTIVITY AND/OR WAS OBTAINED FROM A VIDEO PROVIDER LICENSED TO PROVIDE SUCH VIDEO. NONE OF THE LICENSES EXTEND TO ANY OTHER PRODUCT
REGARDLESS OF WHETHER SUCH PRODUCT IS INCLUDED WITH THIS SOFTWARE IN A SINGLE ARTICLE. NO LICENSE IS GRANTED OR SHALL BE IMPLIED FOR ANY OTHER USE. ADDITIONAL
INFORMATION MAY BE OBTAINED FROM MPEG LA, L.L.C. SEE WWW.MPEGLA.COM.
d. For clarification purposes, this notice does not limit or inhibit the use of the software for normal business uses that are personal to that
business which do not include (i) redistribution of the software to third parties, or (ii) creation of content with the VIDEO STANDARDS compliant technologies
for distribution to third parties.
e. INTERNET-BASED SERVICES. Microsoft provides Internet-based services with the software. It may change or cancel them at any time.
f. Consent for Internet-Based Services. The software features described below and in the privacy statement at
http://go.microsoft.com/fwlink/?LinkId=521839 connect to Microsoft or service provider computer systems over the Internet. In some cases, you will not receive
a separate notice when they connect. In some cases, you may switch off these features or not use them as described in the applicable product documentation. By
using these features, you consent to the transmission of this information. Microsoft does not use the information to identify or contact you.
i. Computer Information. The following features use Internet protocols, which send to the appropriate systems computer information, such as your
Internet protocol address, the type of operating system, browser, and name and version of the software you are using, and the language code of the device
where you installed the software. Microsoft uses this information to make the Internet-based services available to you.
• Software Use and Performance. This software collects info about your hardware and how you use the software and automatically sends error
reports to Microsoft. These reports include information about problems that occur in the software. Reports might unintentionally contain personal
information. For example, a report that contains a snapshot of computer memory might include your name. Part of a document you were working on could be
included as well, but this information in reports or any info collected about hardware or your software use will not be used to identify or contact you.
• Digital Certificates. The software uses digital certificates. These digital certificates confirm the identity of Internet users sending
X.509 standard encryption information. They also can be used to digitally sign files and macros to verify the integrity and origin of the file contents. The
software retrieves certificates and updates certificate revocation lists using the Internet, when available.
• Windows Application Certification Kit. To ensure you have the latest certification tests, when launched this software periodically checks a
Windows Application Certification Kit file on download.microsoft.com to see if an update is available. If an update is found, you are prompted and provided a
link to a web site where you can download the update. You may use the Windows Application Certification Kit solely to test your programs before you submit
them for a potential Microsoft Windows Certification and for inclusion on the Microsoft Windows Store. The results you receive are for informational purposes
only. Microsoft has no obligation to either (i) provide you with a Windows Certification for your programs and/or (ii) include your program in the Microsoft
Windows Store.
• Microsoft Digital Rights Management for Silverlight.
• If you use Silverlight to access content that has been protected with Microsoft Digital Rights Management (DRM), in order to let you play
the content, the software may automatically
• request media usage rights from a rights server on the Internet and
• download and install available DRM Updates.
• For more information about this feature, including instructions for turning the Automatic Updates off, go to
http://go.microsoft.com/fwlink/?LinkId=147032.
1. Web Content Features. Features in the software can retrieve related content from Microsoft and provide it to you. To provide the content,
these features send to Microsoft the type of operating system, name and version of the software you are using, type of browser and language code of the device
where you installed the software. Examples of these features are clip art, templates, online training, online assistance, help and Appshelp. You may choose
not to use these web content features.
ii. Use of Information. We may use information collected about software use and performance to provide and improve Microsoft software and services
as further described in Microsoft’s Privacy Statement available at: https://go.microsoft.com/fwlink/?LinkID=521839. We may also share it with others, such as
hardware and software vendors. They may use the information to improve how their products run with Microsoft software.
iii. Misuse of Internet-based Services. You may not use these services in any way that could harm them or impair anyone else’s use of them. You
may not use the services to try to gain unauthorized access to any service, data, account or network by any means.
3. YOUR COMPLIANCE WITH PRIVACY AND DATA PROTECTION LAWS.
a. Personal Information Definition. "Personal Information" means any information relating to an identified or identifiable natural person; an
identifiable natural person is one who can be identified, directly or indirectly, in particular by reference to an identifier such as a name, an
identification number, location data, an online identifier or to one or more factors specific to the physical, physiological, genetic, mental, economic,
cultural or social identity of that natural person.
b. Collecting Personal Information using Packaged and Add-on APIs. If you use any API to collect personal information from the software, you must
comply with all laws and regulations applicable to your use of the data accessed through APIs including without limitation laws related to privacy, biometric
data, data protection, and confidentiality of communications. Your use of the software is conditioned upon implementing and maintaining appropriate
protections and measures for your applications and services, and that includes your responsibility to the data obtained through the use of APIs. For the data
you obtained through any APIs, you must:
i. obtain all necessary consents before collecting and using data and only use the data for the limited purposes to which the user consented,
including any consent to changes in use;
ii. In the event you’re storing data, ensure that data is kept up to date and implement corrections, restrictions to data, or the deletion of data
as updated through packaged or add-on APIs or upon user request if required by applicable law;
iii. implement proper retention and deletion policies, including deleting all data when as directed by your users or as required by applicable
law; and
iv. maintain and comply with a written statement available to your customers that describes your privacy practices regarding data and information
you collect, use and that you share with any third parties.
c. Location Framework. The software may contain a location framework component or APIs that enable support of location services in programs.
Programs that receive device location must comply with the requirements related to the Location Service APIs as described in the Microsoft Store Policies
(https://docs.microsoft.com/en-us/legal/windows/agreements/store-policies). If you choose to collect device location data outside of the control of Windows
system settings, you must obtain legally sufficient consent for your data practices, and such practices must comply with all other applicable laws and
regulations.
d. Security. If your application or service collects, stores or transmits personal information, it must do so securely, by using modern cryptography
methods.
4. BACKUP COPY. You may make one backup copy of the software. You may use it only to reinstall the software.
5. DOCUMENTATION. Any person that has valid access to your computer or internal network may copy and use the documentation for your internal, reference
purposes.
6. SCOPE OF LICENSE. The software is licensed, not sold. This agreement only gives you some rights to use the software. Microsoft reserves all other
rights. Unless applicable law gives you more rights despite this limitation, you may use the software only as expressly permitted in this agreement. In doing
so, you must comply with any technical limitations in the software that only allow you to use it in certain ways. You may not
• Except for the Microsoft .NET Framework, you must obtain Microsoft's prior written approval to disclose to a third party the results of any benchmark
test of the software.
• work around any technical limitations in the software;
• reverse engineer, decompile or disassemble the software, except and only to the extent that applicable law expressly permits, despite this limitation;
• make more copies of the software than specified in this agreement or allowed by applicable law, despite this limitation;
• publish the software for others to copy;
• rent, lease or lend the software;
• transfer the software or this agreement to any third party; or
• use the software for commercial software hosting services.
7. EXPORT RESTRICTIONS. The software is subject to United States export laws and regulations. You must comply with all domestic and international export
laws and regulations that apply to the software. These laws include restrictions on destinations, end users and end use. For additional information, see
www.microsoft.com/exporting.
8. SUPPORT SERVICES. Because this software is “as is,” we may not provide support services for it.
9. ENTIRE AGREEMENT. This agreement, and the terms for supplements, updates, Internet-based services and support services that you use, are the entire
agreement for the software and support services.
10. INDEPENDENT PARTIES. Microsoft and you are independent contractors. Nothing in this agreement shall be construed as creating an employer-employee
relationship, processor-subprocessor relationship, a partnership, or a joint venture between the parties.
11. APPLICABLE LAW AND PLACE TO RESOLVE DISPUTES. If you acquired the software in the United States or Canada, the laws of the state or province where you
live (or, if a business, where your principal place of business is located) govern the interpretation of this agreement, claims for its breach, and all other
claims (including consumer protection, unfair competition, and tort claims), regardless of conflict of laws principles. If you acquired the software in any
other country, its laws apply. If U.S. federal jurisdiction exists, you and Microsoft consent to exclusive jurisdiction and venue in the federal court in King
County, Washington for all disputes heard in court. If not, you and Microsoft consent to exclusive jurisdiction and venue in the Superior Court of King
County, Washington for all disputes heard in court.
12. LEGAL EFFECT. This agreement describes certain legal rights. You may have other rights under the laws of your country. You may also have rights with
respect to the party from whom you acquired the software. This agreement does not change your rights under the laws of your country if the laws of your
country do not permit it to do so.
13. DISCLAIMER OF WARRANTY. The software is licensed “as-is.” You bear the risk of using it. Microsoft gives no express warranties, guarantees or
conditions. You may have additional consumer rights or statutory guarantees under your local laws which this agreement cannot change. To the extent permitted
under your local laws, Microsoft excludes the implied warranties of merchantability, fitness for a particular purpose and non-infringement.
FOR AUSTRALIA – You have statutory guarantees under the Australian Consumer Law and nothing in these terms is intended to affect those rights.
14. LIMITATION ON AND EXCLUSION OF REMEDIES AND DAMAGES. You can recover from Microsoft and its suppliers only direct damages up to U.S. $5.00. You cannot
recover any other damages, including consequential, lost profits, special, indirect or incidental damages.
This limitation applies to
• anything related to the software, services, content (including code) on third party Internet sites, or third party programs; and
• claims for breach of contract, breach of warranty, guarantee or condition, strict liability, negligence, or other tort to the extent permitted by
applicable law.
It also applies even if Microsoft knew or should have known about the possibility of the damages. The above limitation or exclusion may not apply to you
because your country may not allow the exclusion or limitation of incidental, consequential or other damages.
Please note: As this software is distributed in Quebec, Canada, some of the clauses in this agreement are provided below in French.
Remarque : Ce logiciel étant distribué au Québec, Canada, certaines des clauses dans ce contrat sont fournies ci-dessous en français.
EXONÉRATION DE GARANTIE. Le logiciel visé par une licence est offert « tel quel ». Toute utilisation de ce logiciel est à votre seule risque et péril.
Microsoft n’accorde aucune autre garantie expresse. Vous pouvez bénéficier de droits additionnels en vertu du droit local sur la protection des consommateurs,
que ce contrat ne peut modifier. Là où elles sont permises par le droit local, les garanties implicites de qualité marchande, d’adéquation à un usage
particulier et d’absence de contrefaçon sont exclues.
LIMITATION DES DOMMAGES-INTÉRÊTS ET EXCLUSION DE RESPONSABILITÉ POUR LES DOMMAGES. Vous pouvez obtenir de Microsoft et de ses fournisseurs une indemnisation
en cas de dommages directs uniquement à hauteur de 5,00 $ US. Vous ne pouvez prétendre à aucune indemnisation pour les autres dommages, y compris les dommages
spéciaux, indirects ou accessoires et pertes de bénéfices.
Cette limitation concerne :
• tout ce qui est relié au logiciel, aux services ou au contenu (y compris le code) figurant sur des sites Internet tiers ou dans des programmes tiers ;
et
• les réclamations au titre de violation de contrat ou de garantie, ou au titre de responsabilité stricte, de négligence ou d’une autre faute dans la
limite autorisée par la loi en vigueur.
Elle s’applique également, même si Microsoft connaissait ou devrait connaître l’éventualité d’un tel dommage. Si votre pays n’autorise pas l’exclusion ou la
limitation de responsabilité pour les dommages indirects, accessoires ou de quelque nature que ce soit, il se peut que la limitation ou l’exclusion ci-dessus
ne s’appliquera pas à votre égard.
EFFET JURIDIQUE. Le présent contrat décrit certains droits juridiques. Vous pourriez avoir d’autres droits prévus par les lois de votre pays. Le présent
contrat ne modifie pas les droits que vous confèrent les lois de votre pays si celles-ci ne le permettent pas.
***************
EULAID:WIN10SDK.RTM.AUG_2018_en-US
*************************************************************************
```
Microsoft Visual C++ 2019 Runtime
```
MICROSOFT SOFTWARE LICENSE TERMS
MICROSOFT VISUAL C++ 2019 RUNTIME
These license terms are an agreement between Microsoft Corporation (or
based on where you live, one of its affiliates) and you. They apply to
the software named above. The terms also apply to any Microsoft services
or updates for the software, except to the extent those have different
terms.
IF YOU COMPLY WITH THESE LICENSE TERMS, YOU HAVE THE RIGHTS BELOW.
- INSTALLATION AND USE RIGHTS.
- You may install and use any number of copies of the software.
- TERMS FOR SPECIFIC COMPONENTS.
- MICROSOFT PLATFORMS. The software may include components from
Microsoft Windows; Microsoft Windows Server; Microsoft SQL
Server; Microsoft Exchange; Microsoft Office; and Microsoft
SharePoint. These components are governed by separate agreements
and their own product support policies, as described in the
Microsoft “Licenses” folder accompanying the software, except
that, if license terms for those components are also included in
the associated installation directory, those license terms
control.
- THIRD PARTY COMPONENTS. The software may include third party
components with separate legal notices or governed by other
agreements, as may be described in the ThirdPartyNotices file(s)
accompanying the software.
- SCOPE OF LICENSE. The software is licensed, not sold. This agreement
only gives you some rights to use the software. Microsoft reserves
all other rights. Unless applicable law gives you more rights
despite this limitation, you may use the software only as expressly
permitted in this agreement. In doing so, you must comply with any
technical limitations in the software that only allow you to use it
in certain ways. You may not
- work around any technical limitations in the software;
- reverse engineer, decompile or disassemble the software, or
otherwise attempt to derive the source code for the software
except, and only to the extent required by third party licensing
terms governing the use of certain open source components that
may be included in the software;
- remove, minimize, block or modify any notices of Microsoft or
its suppliers in the software;
- use the software in any way that is against the law; or
- share, publish, rent or lease the software, or provide the
software as a stand-alone offering for others to use, or
transfer the software or this agreement to any third party.
- EXPORT RESTRICTIONS. You must comply with all domestic and
international export laws and regulations that apply to the
software, which include restrictions on destinations, end users, and
end use. For further information on export restrictions, visit
www.microsoft.com/exporting.
- SUPPORT SERVICES. Because this software is “as is,” we may not
provide support services for it.
- ENTIRE AGREEMENT. This agreement, and the terms for supplements,
updates, Internet-based services and support services that you use,
are the entire agreement for the software and support services.
- APPLICABLE LAW. If you acquired the software in the United States,
Washington law applies to interpretation of and claims for breach of
this agreement, and the laws of the state where you live apply to
all other claims. If you acquired the software in any other country,
its laws apply.
- CONSUMER RIGHTS; REGIONAL VARIATIONS. This agreement describes
certain legal rights. You may have other rights, including consumer
rights, under the laws of your state or country. Separate and apart
from your relationship with Microsoft, you may also have rights with
respect to the party from which you acquired the software. This
agreement does not change those other rights if the laws of your
state or country do not permit it to do so. For example, if you
acquired the software in one of the below regions, or mandatory
country law applies, then the following provisions apply to you:
- AUSTRALIA. You have statutory guarantees under the Australian
Consumer Law and nothing in this agreement is intended to affect
those rights.
- CANADA. If you acquired this software in Canada, you may stop
receiving updates by turning off the automatic update feature,
disconnecting your device from the Internet (if and when you
re-connect to the Internet, however, the software will resume
checking for and installing updates), or uninstalling the
software. The product documentation, if any, may also specify
how to turn off updates for your specific device or software.
- GERMANY AND AUSTRIA.
(i) WARRANTY. The properly licensed software will perform
substantially as described in any Microsoft materials that
accompany the software. However, Microsoft gives no contractual
guarantee in relation to the licensed software.
(ii) LIMITATION OF LIABILITY. In case of intentional conduct,
gross negligence, claims based on the Product Liability Act, as
well as, in case of death or personal or physical injury,
Microsoft is liable according to the statutory law.
- Subject to the foregoing clause (ii), Microsoft will only be
liable for slight negligence if Microsoft is in breach of such
material contractual obligations, the fulfillment of which
facilitate the due performance of this agreement, the breach of
which would endanger the purpose of this agreement and the
compliance with which a party may constantly trust in (so-called
"cardinal obligations"). In other cases of slight negligence,
Microsoft will not be liable for slight negligence.
- DISCLAIMER OF WARRANTY. THE SOFTWARE IS LICENSED “AS-IS.” YOU BEAR
THE RISK OF USING IT. MICROSOFT GIVES NO EXPRESS WARRANTIES,
GUARANTEES OR CONDITIONS. TO THE EXTENT PERMITTED UNDER YOUR LOCAL
LAWS, MICROSOFT EXCLUDES THE IMPLIED WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
- LIMITATION ON AND EXCLUSION OF DAMAGES. YOU CAN RECOVER FROM
MICROSOFT AND ITS SUPPLIERS ONLY DIRECT DAMAGES UP TO U.S. $5.00.
YOU CANNOT RECOVER ANY OTHER DAMAGES, INCLUDING CONSEQUENTIAL, LOST
PROFITS, SPECIAL, INDIRECT OR INCIDENTAL DAMAGES.
This limitation applies to (a) anything related to the software,
services, content (including code) on third party Internet sites, or
third party applications; and (b) claims for breach of contract,
breach of warranty, guarantee or condition, strict liability,
negligence, or other tort to the extent permitted by applicable law.
It also applies even if Microsoft knew or should have known about
the possibility of the damages. The above limitation or exclusion
may not apply to you because your country may not allow the
exclusion or limitation of incidental, consequential or other
damages.
```
napari-0.5.0a1/LICENSE 0000664 0000000 0000000 00000002742 14370413656 0014233 0 ustar 00root root 0000000 0000000 BSD 3-Clause License
Copyright (c) 2018, Napari
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
napari-0.5.0a1/MANIFEST.in 0000664 0000000 0000000 00000001217 14370413656 0014760 0 ustar 00root root 0000000 0000000 include LICENSE
include *.cff
graft napari/_vendor
recursive-include napari *.pyi
recursive-include napari _tests/*.py
recursive-include napari_builtins _tests/*.py
recursive-include napari *.pyi
recursive-include napari *.png *.svg *.qss *.gif *.ico *.icns
recursive-include napari *.yaml
# explicit excludes to keep check-manifest happy and remind us that
# these things are not being included unless we ask
recursive-exclude tools *
recursive-exclude napari *.pyc
exclude napari/benchmarks/*
recursive-exclude resources *
recursive-exclude binder *
recursive-exclude examples *
exclude bundle.py
exclude dockerfile
exclude EULA.md
exclude Singularity
napari-0.5.0a1/Makefile 0000664 0000000 0000000 00000002236 14370413656 0014664 0 ustar 00root root 0000000 0000000 .PHONY: typestubs pre watch dist settings-schema
typestubs:
python -m napari.utils.stubgen
# note: much faster to run mypy as daemon,
# dmypy run -- ...
# https://mypy.readthedocs.io/en/stable/mypy_daemon.html
typecheck:
mypy napari/settings napari/types.py napari/plugins
check-manifest:
pip install -U check-manifest
check-manifest
dist: typestubs check-manifest
pip install -U build
python -m build
settings-schema:
python -m napari.settings._napari_settings
pre:
pre-commit run -a
# If the first argument is "watch"...
ifeq (watch,$(firstword $(MAKECMDGOALS)))
# use the rest as arguments for "watch"
WATCH_ARGS := $(wordlist 2,$(words $(MAKECMDGOALS)),$(MAKECMDGOALS))
# ...and turn them into do-nothing targets
$(eval $(WATCH_ARGS):;@:)
endif
# examples:
# make watch ~/Desktop/Untitled.png
# make watch -- -w animation # -- is required for passing flags to napari
watch:
@echo "running: napari $(WATCH_ARGS)"
@echo "Save any file to restart napari\nCtrl-C to stop..\n" && \
watchmedo auto-restart -R \
--ignore-patterns="*.pyc*" -D \
--signal SIGKILL \
napari -- $(WATCH_ARGS) || \
echo "please run 'pip install watchdog[watchmedo]'"
napari-0.5.0a1/README.md 0000664 0000000 0000000 00000016752 14370413656 0014513 0 ustar 00root root 0000000 0000000 # napari
### multi-dimensional image viewer for python
[](https://mybinder.org/v2/gh/napari/napari/main?urlpath=%2Fdesktop)
[](https://forum.image.sc/tag/napari)
[](https://github.com/napari/napari/raw/main/LICENSE)
[](https://cirrus-ci.com/napari/napari)
[](https://codecov.io/gh/napari/napari)
[](https://python.org)
[](https://pypi.org/project/napari)
[](https://pypistats.org/packages/napari)
[](https://en.wikipedia.org/wiki/Software_release_life_cycle#Alpha)
[](https://github.com/python/black)
[](https://zenodo.org/badge/latestdoi/144513571)
**napari** is a fast, interactive, multi-dimensional image viewer for Python. It's designed for browsing, annotating, and analyzing large multi-dimensional images. It's built on top of Qt (for the GUI), vispy (for performant GPU-based rendering), and the scientific Python stack (numpy, scipy).
We're developing **napari** in the open! But the project is in an **alpha** stage, and there will still likely be **breaking changes** with each release. You can follow progress on [this repository](https://github.com/napari/napari), test out new versions as we release them, and contribute ideas and code.
We're working on [tutorials](https://napari.org/tutorials/), but you can also quickly get started by looking below.
## installation
It is recommended to install napari into a virtual environment, like this:
```sh
conda create -y -n napari-env -c conda-forge python=3.9
conda activate napari-env
python -m pip install "napari[all]"
```
If you prefer conda over pip, you can replace the last line with: `conda install -c conda-forge napari`
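For example, a conda-only route (no pip) would then look something like this, reusing the environment name from above:
```sh
conda create -y -n napari-env -c conda-forge python=3.9
conda activate napari-env
conda install -c conda-forge napari
```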
See here for the full [installation guide](https://napari.org/tutorials/fundamentals/installation.html), including how to [install napari as a bundled app](https://napari.org/tutorials/fundamentals/installation.html#install-as-a-bundled-app).
## simple example
(The examples below require the `scikit-image` package to run. We just use data samples from this package for demonstration purposes. If you change the examples to use your own dataset, you may not need to install this package.)
From inside an IPython shell, you can open up an interactive viewer by calling
```python
from skimage import data
import napari
viewer = napari.view_image(data.cells3d(), channel_axis=1, ndisplay=3)
```

To use napari from inside a script, use `napari.run()`:
```python
from skimage import data
import napari
viewer = napari.view_image(data.cells3d(), channel_axis=1, ndisplay=3)
napari.run() # start the "event loop" and show the viewer
```
## features
Check out the scripts in our [`examples` folder](examples) to see some of the functionality we're developing!
**napari** supports six main layer types: `Image`, `Labels`, `Points`, `Vectors`, `Shapes`, and `Surface`, each corresponding to a different data type, visualization, and interactivity. You can add multiple layers of different types into the viewer and then start working with them, adjusting their properties.
All our layer types support n-dimensional data and the viewer provides the ability to quickly browse and visualize either 2D or 3D slices of the data.
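As a rough sketch (using sample data from `scikit-image` as in the example above; the layer names and point coordinates here are arbitrary), adding a couple of layers and adjusting their properties might look like this:
```python
import numpy as np
from skimage import data
import napari
viewer = napari.Viewer()
# add an image layer, then a points layer on top of it
image_layer = viewer.add_image(data.camera(), name='photographer')
points_layer = viewer.add_points(
    np.array([[100, 100], [200, 200], [300, 100]]), size=20, name='markers'
)
# layer properties can be adjusted after creation
image_layer.opacity = 0.9
points_layer.symbol = '+'
napari.run()  # start the event loop when running as a script
```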
**napari** also supports bidirectional communication between the viewer and the Python kernel, which is especially useful when launching from jupyter notebooks or when using our built-in console. Using the console allows you to interactively load and save data from the viewer and control all the features of the viewer programmatically.
You can extend **napari** using custom shortcuts, key bindings, and mouse functions.
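For instance, a custom key binding and mouse callback can be attached roughly like this (the chosen key and the callback bodies are placeholders, not part of napari's defaults):
```python
from skimage import data
import napari
viewer = napari.view_image(data.camera())
@viewer.bind_key('b')  # press 'b' to toggle visibility of the first layer
def toggle_first_layer(viewer):
    layer = viewer.layers[0]
    layer.visible = not layer.visible
@viewer.mouse_drag_callbacks.append
def report_click(viewer, event):
    # runs whenever the mouse button is pressed on the canvas
    print('mouse down at', event.position)
napari.run()
```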
## tutorials
For more details on how to use `napari` checkout our [tutorials](https://napari.org/tutorials/). These are still a work in progress, but we'll be updating them regularly.
## mission, values, and roadmap
For more information about our plans for `napari` you can read our [mission and values statement](https://napari.org/community/mission_and_values.html), which includes more details on our vision for supporting a plugin ecosystem around napari.
You can see details of [the project roadmap here](https://napari.org/roadmaps/index.html).
## contributing
Contributions are encouraged! Please read our [contributing guide](https://napari.org/developers/contributing.html) to get started. Given that we're in an early stage, you may want to reach out on our [GitHub Issues](https://github.com/napari/napari/issues) before jumping in.
## code of conduct
`napari` has a [Code of Conduct](https://napari.org/community/code_of_conduct.html) that should be honored by everyone who participates in the `napari` community.
## governance
You can learn more about how the `napari` project is organized and managed from our [governance model](https://napari.org/community/governance.html), which includes information about, and ways to contact the [@napari/steering-council and @napari/core-devs](https://napari.org/community/team.html#current-core-developers).
## citing napari
If you find `napari` useful please cite [this repository](https://github.com/napari/napari) using its DOI as follows:
> napari contributors (2019). napari: a multi-dimensional image viewer for python. [doi:10.5281/zenodo.3555620](https://zenodo.org/record/3555620)
Note this DOI will resolve to all versions of napari. To cite a specific version please find the
DOI of that version on our [zenodo page](https://zenodo.org/record/3555620). The DOI of the latest version is in the badge at the top of this page.
## help
We're a community partner on the [image.sc forum](https://forum.image.sc/tags/napari) and all help and support requests should be posted on the forum with the tag `napari`. We look forward to interacting with you there.
Bug reports should be made on our [github issues](https://github.com/napari/napari/issues/new?template=bug_report.md) using
the bug report template. If you think something isn't working, don't hesitate to reach out - it is probably us and not you!
## institutional and funding partners

napari-0.5.0a1/Singularity 0000664 0000000 0000000 00000000137 14370413656 0015457 0 ustar 00root root 0000000 0000000 BootStrap: docker
From: ghcr.io/napari/napari:main
%post
date +"%Y-%m-%d-%H%M" > /last_update
napari-0.5.0a1/asv.conf.json 0000664 0000000 0000000 00000003644 14370413656 0015640 0 ustar 00root root 0000000 0000000 {
// The version of the config file format. Do not change, unless
// you know what you are doing.
"version": 1,
// The name of the project being benchmarked
"project": "napari",
// The project's homepage
"project_url": "http://napari.org/",
// The URL or local path of the source code repository for the
// project being benchmarked
"repo": ".",
// Install using default qt install
"build_command": ["python -V"], // skip build stage
"install_command": ["in-dir={env_dir} python -m pip install {build_dir}[all,testing]"],
"uninstall_command": ["in-dir={env_dir} python -m pip uninstall -y {project}"],
// List of branches to benchmark
"branches": ["main"],
// The tool to use to create environments.
"environment_type": "virtualenv",
// timeout in seconds for installing any dependencies in environment
"install_timeout": 600,
// the base URL to show a commit for the project.
"show_commit_url": "http://github.com/napari/napari/commit/",
// The Pythons you'd like to test against.
"pythons": ["3.9"],
// The directory (relative to the current directory) to cache the Python
// environments in.
"env_dir": ".asv/env",
// The directory (relative to the current directory) that raw benchmark
// results are stored in.
"results_dir": ".asv/results",
// The directory (relative to the current directory) that the html tree
// should be written to.
"html_dir": ".asv/html",
// The directory (relative to the current directory) where the benchmarks
// are stored
"benchmark_dir": "napari/benchmarks",
// The number of characters to retain in the commit hashes.
"hash_length": 8,
// `asv` will cache results of the recent builds in each
// environment, making them faster to install next time. This is
// the number of builds to keep, per environment.
"build_cache_size": 2,
}
napari-0.5.0a1/binder/ 0000775 0000000 0000000 00000000000 14370413656 0014464 5 ustar 00root root 0000000 0000000 napari-0.5.0a1/binder/Desktop/ 0000775 0000000 0000000 00000000000 14370413656 0016075 5 ustar 00root root 0000000 0000000 napari-0.5.0a1/binder/Desktop/napari.desktop 0000775 0000000 0000000 00000000221 14370413656 0020740 0 ustar 00root root 0000000 0000000 [Desktop Entry]
Version=1.0
Type=Application
Name=napari 0.4.x
Exec=napari
Icon=/home/jovyan/napari/resources/icon.ico
Path=/home/jovyan/Desktop
napari-0.5.0a1/binder/apt.txt 0000664 0000000 0000000 00000000227 14370413656 0016012 0 ustar 00root root 0000000 0000000 dbus-x11
xfce4
xfce4-panel
xfce4-session
xfce4-settings
xorg
xubuntu-icon-theme
libxss1
libpci3
libasound2
fonts-ubuntu
qutebrowser
htop
nano
libgles2
napari-0.5.0a1/binder/environment.yml 0000664 0000000 0000000 00000001673 14370413656 0017562 0 ustar 00root root 0000000 0000000 channels:
- conda-forge # Used by jupyter-desktop-server
dependencies:
# See: https://github.com/conda-forge/napari-feedstock/blob/master/recipe/meta.yaml
- python >=3.8
# dependencies matched to pip
- appdirs >=1.4.4
- cachey >=0.2.1
- certifi >=2020.6.20
- dask >=2.1.0
- imageio >=2.5.0
- importlib-metadata >=1.5.0 # not needed for py>37 but keeping for noarch
- jsonschema >=3.2.0
- magicgui >=0.2.6
- napari-console >=0.0.4
- napari-plugin-engine >=0.1.9
- napari-svg >=0.1.4
- numpy >=1.18.5
- numpydoc >=0.9.2
- pillow
- pint >=0.17
- psutil >=5.0
- pyopengl >=3.1.0
- pyyaml >=5.1
- pydantic >=1.8.1
- qtpy >=1.7.0
- scipy >=1.2.0
- superqt >=0.2.2
- tifffile >=2020.2.16
- typing_extensions
- toolz >=0.10.0
- tqdm >=4.56.0
- vispy >=0.6.4
- wrapt >=1.11.1
# additional dependencies for convenience in conda-forge
- fsspec
- pyqt
- scikit-image
- zarr
# Required for desktop view on mybinder.org
- websockify
- pip:
- jupyter-desktop-server
napari-0.5.0a1/binder/postBuild 0000775 0000000 0000000 00000000541 14370413656 0016357 0 ustar 00root root 0000000 0000000 #!/bin/bash
set -euo pipefail
cp -r binder/Desktop ${HOME}/Desktop
# Apply our Xfce settings
mkdir -p ${HOME}/.config/xfce4/xfconf/xfce-perchannel-xml
cp binder/xsettings.xml ${HOME}/.config/xfce4/xfconf/xfce-perchannel-xml/
cp binder/xfce4-panel.xml ${HOME}/.config/xfce4/xfconf/xfce-perchannel-xml/
# Install napari
pip install ${HOME}/ --no-deps
napari-0.5.0a1/binder/xfce4-panel.xml 0000664 0000000 0000000 00000002724 14370413656 0017321 0 ustar 00root root 0000000 0000000
napari-0.5.0a1/binder/xsettings.xml 0000664 0000000 0000000 00000003322 14370413656 0017236 0 ustar 00root root 0000000 0000000
napari-0.5.0a1/bundle.py 0000664 0000000 0000000 00000022324 14370413656 0015047 0 ustar 00root root 0000000 0000000 import configparser
import os
import platform
import re
import shutil
import subprocess
import sys
import time
from contextlib import contextmanager
from pathlib import Path
import tomlkit
APP = 'napari'
# EXTRA_REQS will be added to the bundle, in addition to those specified in
# setup.cfg. To add additional packages to the bundle, or to override any of
# the packages listed here or in `setup.cfg`, use the `--add` command line
# argument with a series of "pip install" style strings when running this file.
# For example, the following will ADD ome-zarr, and CHANGE the version of
# PySide2:
# python bundle.py --add 'PySide2==5.15.0' 'ome-zarr'
# This is now defined in setup.cfg "options.extras_require.bundle_run"
# EXTRA_REQS = []
WINDOWS = os.name == 'nt'
MACOS = sys.platform == 'darwin'
LINUX = sys.platform.startswith("linux")
HERE = os.path.abspath(os.path.dirname(__file__))
PYPROJECT_TOML = os.path.join(HERE, 'pyproject.toml')
SETUP_CFG = os.path.join(HERE, 'setup.cfg')
ARCH = (platform.machine() or "generic").lower().replace("amd64", "x86_64")
if WINDOWS:
BUILD_DIR = os.path.join(HERE, 'windows')
APP_DIR = os.path.join(BUILD_DIR, APP, 'src')
EXT, OS = 'msi', 'Windows'
elif LINUX:
BUILD_DIR = os.path.join(HERE, 'linux')
APP_DIR = os.path.join(BUILD_DIR, APP, f'{APP}.AppDir')
EXT, OS = 'AppImage', 'Linux'
elif MACOS:
BUILD_DIR = os.path.join(HERE, 'macOS')
APP_DIR = os.path.join(BUILD_DIR, APP, f'{APP}.app')
EXT, OS = 'dmg', 'macOS'
with open(os.path.join(HERE, "napari", "_version.py")) as f:
match = re.search(r'version\s?=\s?\'([^\']+)', f.read())
if match:
VERSION = match.groups()[0].split('+')[0]
@contextmanager
def patched_toml():
parser = configparser.ConfigParser()
parser.read(SETUP_CFG)
requirements = parser.get("options", "install_requires").splitlines()
requirements = [r.split('#')[0].strip() for r in requirements if r]
with open(PYPROJECT_TOML) as f:
original_toml = f.read()
toml = tomlkit.parse(original_toml)
# Initialize EXTRA_REQS from setup.cfg 'options.extras_require.bundle_run'
bundle_run = parser.get("options.extras_require", "bundle_run")
EXTRA_REQS = [
requirement.split('#')[0].strip()
for requirement in bundle_run.splitlines()
if requirement
]
# parse command line arguments
if '--add' in sys.argv:
for item in sys.argv[sys.argv.index('--add') + 1 :]:
if item.startswith('-'):
break
EXTRA_REQS.append(item)
for item in EXTRA_REQS:
_base = re.split('<|>|=', item, maxsplit=1)[0]
for r in requirements:
if r.startswith(_base):
requirements.remove(r)
break
if _base.lower().startswith('pyqt5'):
try:
i = next(x for x in requirements if x.startswith('PySide'))
requirements.remove(i)
except StopIteration:
pass
requirements += EXTRA_REQS
toml['tool']['briefcase']['app'][APP]['requires'] = requirements
toml['tool']['briefcase']['version'] = VERSION
print("patching pyproject.toml to version: ", VERSION)
print(
"patching pyproject.toml requirements to:",
*toml['tool']['briefcase']['app'][APP]['requires'],
sep="\n ",
)
if MACOS:
# Workaround https://github.com/napari/napari/issues/2965
# Pin revisions to releases _before_ they switched to static libs
revision = {
(3, 6): '11',
(3, 7): '5',
(3, 8): '4',
(3, 9): '1',
}[sys.version_info[:2]]
app_table = toml['tool']['briefcase']['app'][APP]
app_table.add('macOS', tomlkit.table())
app_table['macOS']['support_revision'] = revision
print(
"patching pyproject.toml to pin support package to revision:",
revision,
)
with open(PYPROJECT_TOML, 'w') as f:
f.write(tomlkit.dumps(toml))
try:
yield
finally:
with open(PYPROJECT_TOML, 'w') as f:
f.write(original_toml)
@contextmanager
def patched_dmgbuild():
if not MACOS:
yield
else:
from dmgbuild import core
with open(core.__file__) as f:
src = f.read()
with open(core.__file__, 'w') as f:
f.write(
src.replace(
"shutil.rmtree(os.path.join(mount_point, '.Trashes'), True)",
"shutil.rmtree(os.path.join(mount_point, '.Trashes'), True);time.sleep(30)",
)
)
print("patched dmgbuild.core")
try:
yield
finally:
# undo
with open(core.__file__, 'w') as f:
f.write(src)
def add_site_packages_to_path():
# on mac, make sure the site-packages folder exists even before the user
# has pip installed, so it is in sys.path on the first run
# (otherwise, newly installed plugins will not be detected until restart)
if MACOS:
pkgs_dir = os.path.join(
APP_DIR,
'Contents',
'Resources',
'Support',
'lib',
f'python{sys.version_info.major}.{sys.version_info.minor}',
'site-packages',
)
os.makedirs(pkgs_dir)
print("created site-packages at", pkgs_dir)
# on windows, briefcase uses a _pth file to determine the sys.path at
# runtime. https://docs.python.org/3/using/windows.html#finding-modules
# We update that file with the eventual location of pip site-packages
elif WINDOWS:
py = "".join(map(str, sys.version_info[:2]))
python_dir = os.path.join(BUILD_DIR, APP, 'src', 'python')
pth = os.path.join(python_dir, f'python{py}._pth')
with open(pth, "a") as f:
# Append the bundled site-packages path to the end of the _pth file
f.write(".\\\\Lib\\\\site-packages\n")
print("added bundled site-packages to", pth)
pkgs_dir = os.path.join(python_dir, 'Lib', 'site-packages')
os.makedirs(pkgs_dir)
print("created site-packages at", pkgs_dir)
with open(os.path.join(pkgs_dir, 'readme.txt'), 'w') as f:
f.write("this is where plugin packages will go")
def patch_wxs():
# must run after briefcase create
fname = os.path.join(BUILD_DIR, APP, f'{APP}.wxs')
if os.path.exists(fname):
with open(fname) as f:
source = f.read()
with open(fname, 'w') as f:
f.write(source.replace('pythonw.exe', 'python.exe'))
print("patched pythonw.exe -> python.exe")
def patch_python_lib_location():
# must run after briefcase create
support = os.path.join(
BUILD_DIR, APP, APP + ".app", "Contents", "Resources", "Support"
)
python_resources = os.path.join(support, "Python", "Resources")
if os.path.exists(python_resources):
return
os.makedirs(python_resources, exist_ok=True)
for subdir in ("bin", "lib"):
orig = os.path.join(support, subdir)
dest = os.path.join(python_resources, subdir)
os.symlink("../../" + subdir, dest)
print("symlinking", orig, "to", dest)
def add_sentinel_file():
if MACOS:
(Path(APP_DIR) / "Contents" / "MacOS" / ".napari_is_bundled").touch()
elif LINUX:
(Path(APP_DIR) / "usr" / "bin" / ".napari_is_bundled").touch()
elif WINDOWS:
(Path(APP_DIR) / "python" / ".napari_is_bundled").touch()
else:
print("!!! Sentinel files not yet implemented in", sys.platform)
def patch_environment_variables():
os.environ["ARCH"] = ARCH
def make_zip():
import glob
import zipfile
artifact = glob.glob(os.path.join(BUILD_DIR, f"*.{EXT}"))[0]
dest = f'napari-{VERSION}-{OS}-{ARCH}.zip'
with zipfile.ZipFile(dest, 'w', zipfile.ZIP_DEFLATED) as zf:
zf.write(artifact, arcname=os.path.basename(artifact))
print("created zipfile: ", dest)
return dest
def clean():
shutil.rmtree(BUILD_DIR, ignore_errors=True)
def bundle():
clean()
if LINUX:
patch_environment_variables()
# smoke test, and build resources
subprocess.check_call([sys.executable, '-m', APP, '--info'])
# the briefcase calls need to happen while the pyproject toml is patched
with patched_toml(), patched_dmgbuild():
# create
cmd = ['briefcase', 'create', '-v'] + (
['--no-docker'] if LINUX else []
)
subprocess.check_call(cmd)
time.sleep(0.5)
add_site_packages_to_path()
add_sentinel_file()
if WINDOWS:
patch_wxs()
elif MACOS:
patch_python_lib_location()
# build
cmd = ['briefcase', 'build', '-v'] + (['--no-docker'] if LINUX else [])
subprocess.check_call(cmd)
# package
cmd = ['briefcase', 'package', '-v']
cmd += ['--no-sign'] if MACOS else (['--no-docker'] if LINUX else [])
subprocess.check_call(cmd)
# compress
dest = make_zip()
clean()
return dest
if __name__ == "__main__":
if '--clean' in sys.argv:
clean()
sys.exit()
if '--version' in sys.argv:
print(VERSION)
sys.exit()
if '--arch' in sys.argv:
print(ARCH)
sys.exit()
print('created', bundle())
napari-0.5.0a1/codecov.yml 0000664 0000000 0000000 00000001620 14370413656 0015365 0 ustar 00root root 0000000 0000000 ignore:
- napari/_version.py
- napari/resources
- napari/benchmarks
coverage:
status:
project:
default: false
library:
target: auto
paths: ['!.*/_tests/.*']
threshold: 1%
qt:
target: auto
paths: ['napari/_qt/.*', '!.*/_tests/.*']
threshold: 1%
layers:
target: auto
paths: [ 'napari/layers/.*', '!.*/_tests/.*' ]
threshold: 1%
utils:
target: auto
paths: [ 'napari/utils/.*', '!.*/_tests/.*' ]
threshold: 2%
tests:
target: auto
paths: ['.*/_tests/.*']
threshold: 1% # coverage can drop by up to 1% while still posting success
patch:
default:
threshold: 1%
target: 0%
codecov:
notify:
after_n_builds: 11
comment:
require_changes: true # if true: only post the PR comment if coverage changes
after_n_builds: 11 napari-0.5.0a1/dockerfile 0000664 0000000 0000000 00000004444 14370413656 0015261 0 ustar 00root root 0000000 0000000 FROM --platform=linux/amd64 ubuntu:22.04 AS napari
# if you change the Ubuntu version, remember to update
# the APT definitions for Xpra below so they reflect the
# new codename (e.g. 20.04 was focal, 22.04 is jammy)
# below env var required to install libglib2.0-0 non-interactively
ENV TZ=America/Los_Angeles
ARG DEBIAN_FRONTEND=noninteractive
# install python resources + graphical libraries used by qt and vispy
RUN apt-get update && \
apt-get install -qqy \
build-essential \
python3.9 \
python3-pip \
git \
mesa-utils \
libgl1-mesa-glx \
libglib2.0-0 \
libfontconfig1 \
libxrender1 \
libdbus-1-3 \
libxkbcommon-x11-0 \
libxi6 \
libxcb-icccm4 \
libxcb-image0 \
libxcb-keysyms1 \
libxcb-randr0 \
libxcb-render-util0 \
libxcb-xinerama0 \
libxcb-xinput0 \
libxcb-xfixes0 \
libxcb-shape0 \
&& apt-get clean
# install napari release version
RUN pip3 install napari[all]
# copy examples
COPY examples /tmp/examples
ENTRYPOINT ["python3", "-m", "napari"]
#########################################################
# Extend napari with a preconfigured Xpra server target #
#########################################################
FROM napari AS napari-xpra
# Install Xpra and dependencies
RUN apt-get install -y wget gnupg2 apt-transport-https && \
wget -O - https://xpra.org/gpg.asc | apt-key add - && \
echo "deb https://xpra.org/ jammy main" > /etc/apt/sources.list.d/xpra.list
RUN apt-get update && \
apt-get install -yqq \
xpra \
xvfb \
xterm \
sshfs && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*
ENV DISPLAY=:100
ENV XPRA_PORT=9876
ENV XPRA_START="python3 -m napari"
ENV XPRA_EXIT_WITH_CLIENT="yes"
ENV XPRA_XVFB_SCREEN="1920x1080x24+32"
EXPOSE 9876
CMD echo "Launching napari on Xpra. Connect via http://localhost:$XPRA_PORT"; \
xpra start \
--bind-tcp=0.0.0.0:$XPRA_PORT \
--html=on \
--start="$XPRA_START" \
--exit-with-client="$XPRA_EXIT_WITH_CLIENT" \
--daemon=no \
--xvfb="/usr/bin/Xvfb +extension Composite -screen 0 $XPRA_XVFB_SCREEN -nolisten tcp -noreset" \
--pulseaudio=no \
--notifications=no \
--bell=no \
$DISPLAY
ENTRYPOINT []
napari-0.5.0a1/examples/ 0000775 0000000 0000000 00000000000 14370413656 0015037 5 ustar 00root root 0000000 0000000 napari-0.5.0a1/examples/3D_paths.py 0000664 0000000 0000000 00000001725 14370413656 0017063 0 ustar 00root root 0000000 0000000 """
3D Paths
========
Display a shapes layer containing two 3D paths on top of a 3D image layer.
One path traverses several planes of the volume, while the other lies within
a single plane.
.. tags:: visualization-advanced, layers
"""
import numpy as np
from skimage import data
import napari
blobs = data.binary_blobs(
length=128, blob_size_fraction=0.05, n_dim=3, volume_fraction=0.05
)
viewer = napari.Viewer(ndisplay=3)
viewer.add_image(blobs.astype(float))
# sample vector coord-like data
path = np.array([np.array([[0, 0, 0], [0, 10, 10], [0, 5, 15], [20, 5, 15],
[56, 70, 21], [127, 127, 127]]),
np.array([[0, 0, 0], [0, 10, 10], [0, 5, 15], [0, 5, 15],
[0, 70, 21], [0, 127, 127]])])
print('Path', path.shape)
layer = viewer.add_shapes(
path, shape_type='path', edge_width=4, edge_color=['red', 'blue']
)
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/3Dimage_plane_rendering.py 0000664 0000000 0000000 00000002653 14370413656 0022104 0 ustar 00root root 0000000 0000000 """
3D image plane rendering
========================
Display a 3D image layer as both a volume and a plane, and modify the plane
parameters interactively with the keyboard and mouse.
.. tags:: visualization-advanced, gui, layers
"""
import numpy as np
from skimage import data
import napari
from napari.utils.translations import trans
viewer = napari.Viewer(ndisplay=3)
# add a 3D image
blobs = data.binary_blobs(
length=64, volume_fraction=0.1, n_dim=3
).astype(np.float32)
image_layer = viewer.add_image(
blobs, rendering='mip', name='volume', blending='additive', opacity=0.25
)
# add the same 3D image and render as plane
# plane should be in 'additive' blending mode or depth looks all wrong
plane_parameters = {
'position': (32, 32, 32),
'normal': (0, 1, 0),
'thickness': 10,
}
plane_layer = viewer.add_image(
blobs,
rendering='average',
name='plane',
depiction='plane',
blending='additive',
opacity=0.5,
plane=plane_parameters
)
viewer.axes.visible = True
viewer.camera.angles = (45, 45, 45)
viewer.camera.zoom = 5
viewer.text_overlay.text = trans._(
"""
shift + click and drag to move the plane
press 'x', 'y' or 'z' to orient the plane along that axis around the cursor
press 'o' to orient the plane normal along the camera view direction
press and hold 'o' then click and drag to make the plane normal follow the camera
"""
)
viewer.text_overlay.visible = True
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/3d_kymograph_.py 0000664 0000000 0000000 00000012060 14370413656 0020136 0 ustar 00root root 0000000 0000000 """
3D Kymographs
=============
This example demonstrates that the volume rendering capabilities of napari
can also be used to render 2d timelapse acquisitions as kymographs.
.. tags:: experimental
"""
from itertools import product
import numpy as np
from tqdm import tqdm
import napari
try:
from omero.gateway import BlitzGateway
except ModuleNotFoundError:
print("Could not import BlitzGateway which is")
print("required to download the sample datasets.")
print("Please install omero-py:")
print("https://pypi.org/project/omero-py/")
exit(-1)
def IDR_fetch_image(image_id: int, progressbar: bool = True) -> np.ndarray:
"""
Download the image with id image_id from the IDR
Will fetch all image planes corresponding to separate
timepoints/channels/z-slices and return a numpy
array with dimension order (t,z,y,x,c)
Displaying download progress can be disabled by passing
False to progressbar.
"""
conn = BlitzGateway(
host="ws://idr.openmicroscopy.org/omero-ws",
username="public",
passwd="public",
secure=True,
)
conn.connect()
conn.c.enableKeepAlive(60)
idr_img = conn.getObject("Image", image_id)
idr_pixels = idr_img.getPrimaryPixels()
_ = idr_img
nt, nz, ny, nx, nc = (
_.getSizeT(),
_.getSizeZ(),
_.getSizeY(),
_.getSizeX(),
_.getSizeC(),
)
plane_indices = list(product(range(nz), range(nc), range(nt)))
idr_plane_iterator = idr_pixels.getPlanes(plane_indices)
if progressbar:
idr_plane_iterator = tqdm(idr_plane_iterator, total=len(plane_indices))
_tmp = np.asarray(list(idr_plane_iterator))
_tmp = _tmp.reshape((nz, nc, nt, ny, nx))
# the following line reorders the axes (no summing, despite the name)
return np.einsum("jmikl", _tmp)
description = """
3D-Kymographs in Napari
=======================
About
=====
This example demonstrates that the volume rendering capabilities of napari
can also be used to render 2d timelapse acquisitions as kymographs.
Kymographs, also called space-time images, are a powerful tool to visualize
the dynamics of processes.
The most common way to visualize kymographs is to pick a single line through
a 2D image and visualize the time domain along a second axis.
Napari is not limited to 2D visualization, and by harnessing its volume
rendering capabilities we can create a 3D kymograph,
a powerful visualization that provides an overview of the complete
spatial and temporal data from a single view.
Using napari's grid mode we can juxtapose multiple such 3D kymographs to
highlight the differences in cell dynamics under different siRNA treatments.
The selected samples are from the Mitocheck screen and demonstrate siRNA
knockdowns of several genes.
The data is timelapse fluorescence microscopy of HeLa cells, with GFP-
tagged histone revealing the chromosomes.
In the juxtaposed kymographs, the reduced branching for the mitotic
phenotypes caused by INCENP, AURKB and KIF11 knockdown compared to
TMPRSS11A knockdown is immediately obvious.
Data Source
===========
The samples to demonstrate this is downloaded from IDR:
https://idr.openmicroscopy.org/webclient/?show=screen-1302
Reference
=========
The data comes from the Mitocheck screen:
Phenotypic profiling of the human genome by time-lapse microscopy reveals cell
division genes.
Neumann B, Walter T, Hériché JK, Bulkescher J, Erfle H, Conrad C, Rogers P,
Poser I, Held M, Liebel U, Cetin C, Sieckmann F, Pau G, Kabbe R, Wünsche A,
Satagopam V, Schmitz MH, Chapuis C, Gerlich DW, Schneider R, Eils R, Huber W,
Peters JM, Hyman AA, Durbin R, Pepperkok R, Ellenberg J.
Nature. 2010 Apr 1;464(7289):721-7.
doi: 10.1038/nature08869.
Acknowledgements
================
Beate Neumann (EMBL) for helpful advice on mitotic phenotypes.
"""
print(description)
samples = (
{"IDRid": 2864587, "description": "AURKB knockdown", "vol": None},
{"IDRid": 2862565, "description": "KIF11 knockdown", "vol": None},
{"IDRid": 2867896, "description": "INCENP knockdown", "vol": None},
{"IDRid": 1486532, "description": "TMPRSS11A knockdown", "vol": None},
)
print("-------------------------------------------------------")
print("Sample datasets will require ~490 MB download from IDR.")
answer = input("Press Enter to proceed, 'n' to cancel: ")
if answer.lower().startswith('n'):
print("User cancelled download. Exiting.")
exit(0)
print("-------------------------------------------------------")
for s in samples:
print(f"Downloading sample {s['IDRid']}.")
print(f"Description: {s['description']}")
s["vol"] = np.squeeze(IDR_fetch_image(s["IDRid"]))
v = napari.Viewer(ndisplay=3)
scale = (5, 1, 1) # "stretch" time domain
for s in samples:
v.add_image(
s["vol"], name=s['description'], scale=scale, blending="opaque"
)
v.grid.enabled = True # show the volumes in grid mode
v.axes.visible = True # magenta arrow shows time direction
# set an oblique view angle onto the kymograph grid
v.camera.center = (440, 880, 1490)
v.camera.angles = (-20, 23, -50)
v.camera.zoom = 0.17
napari.run()
napari-0.5.0a1/examples/README.rst 0000664 0000000 0000000 00000000001 14370413656 0016515 0 ustar 00root root 0000000 0000000
napari-0.5.0a1/examples/action_manager.py 0000664 0000000 0000000 00000007147 14370413656 0020371 0 ustar 00root root 0000000 0000000 """
Action manager
==============
.. tags:: gui, experimental
"""
from random import shuffle
import numpy as np
from skimage import data
import napari
from napari._qt.widgets.qt_viewer_buttons import QtViewerPushButton
from napari.components import ViewerModel
from napari.utils.action_manager import action_manager
def rotate45(viewer: napari.Viewer):
"""
Rotate layer 0 of the viewer by 45º
Parameters
----------
viewer : napari.Viewer
active (unique) instance of the napari viewer
Notes
-----
The `viewer` parameter needs to be named `viewer`; the action manager will
infer that we need an instance of viewer.
"""
angle = np.pi / 4
from numpy import cos, sin
r = np.array([[cos(angle), -sin(angle)], [sin(angle), cos(angle)]])
layer = viewer.layers[0]
layer.rotate = layer.rotate @ r
# create the viewer with an image
viewer = napari.view_image(data.astronaut(), rgb=True)
layer_buttons = viewer.window.qt_viewer.layerButtons
# Button do not need to do anything, just need to be pretty; all the action
# binding and (un) binding will be done with the action manager, idem for
# setting the tooltip.
rot_button = QtViewerPushButton('warning')
layer_buttons.layout().insertWidget(3, rot_button)
def register_action():
# Here we pass ViewerModel as the KeymapProvider as we want it to handle the shortcuts.
# we could also pass None and bind the shortcuts at the window level, though we
# are trying to not change the KeymapProvider API too much for now.
# we give an action name to the action for configuration purposes as we need
# it to be storable in json.
# By convention (which may be enforced later), we give the action a name prefixed
# by the name of the package it is defined in, here napari,
action_manager.register_action(
name='napari:rotate45',
command=rotate45,
description='Rotate layer 0 by 45deg',
keymapprovider=ViewerModel,
)
def bind_shortcut():
# note that the tooltip of the corresponding button will be updated to
# remove the shortcut.
action_manager.unbind_shortcut('napari:reset_view') # Control-R
action_manager.bind_shortcut('napari:rotate45', 'Control-R')
def bind_button():
action_manager.bind_button('napari:rotate45', rot_button)
# we can call bind_shortcut, register_action, or bind_button in any order;
# this lets us configure shortcuts even if plugins are loaded / unloaded.
callbacks = [register_action, bind_shortcut, bind_button]
shuffle(callbacks)
for c in callbacks:
print('calling', c)
c()
# We can set the action manager in debug mode, to help us figure out which
# button is triggering which action. This will update the tooltips of the buttons
# to include the name of the action in between square brackets.
action_manager._debug(True)
# Let's also modify some existing shortcuts, by unbinding a few existing actions,
# and rebinding them with new shortcuts; below we change the add and select mode
# to be the = (same as + key on US Keyboards but without modifiers) and - keys.
# unbinding returns the old key if it exists; but we don't use it.
# in practice you likely don't need to modify the shortcuts this way as it will
# be implemented in settings, though you could imagine a plugin that would
# allow toggling between many keymaps.
settings = {
'napari:activate_points_add_mode' : '=',
'napari:activate_points_select_mode': '-',
}
for action, key in settings.items():
_old_shortcut = action_manager.unbind_shortcut(action)
action_manager.bind_shortcut(action, key)
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/add-points-3d.py 0000664 0000000 0000000 00000001225 14370413656 0017757 0 ustar 00root root 0000000 0000000 """
Add points 3D
=============
Display a labels layer on top of an image layer using the add_labels and
add_image APIs, then add points in 3D
.. tags:: visualization-nD
"""
from scipy import ndimage as ndi
from skimage import data
import napari
blobs = data.binary_blobs(
length=128, volume_fraction=0.1, n_dim=3
)[::2].astype(float)
labeled = ndi.label(blobs)[0]
viewer = napari.Viewer(ndisplay=3)
viewer.add_image(blobs, name='blobs', scale=(2, 1, 1))
viewer.add_labels(labeled, name='blob ID', scale=(2, 1, 1))
pts = viewer.add_points()
viewer.camera.angles = (0, -65, 85)
pts.mode = 'add'
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/add_3D_image.py 0000664 0000000 0000000 00000000614 14370413656 0017632 0 ustar 00root root 0000000 0000000 """
Add 3D image
============
Display a 3D image layer using the :meth:`add_image` API.
.. tags:: visualization-nD, layers
"""
from skimage import data
import napari
blobs = data.binary_blobs(length=64, volume_fraction=0.1, n_dim=3).astype(
float
)
viewer = napari.Viewer(ndisplay=3)
# add the volume
viewer.add_image(blobs, scale=[3, 1, 1])
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/add_grayscale_image.py 0000664 0000000 0000000 00000000712 14370413656 0021335 0 ustar 00root root 0000000 0000000 """
Add grayscale image
===================
Display one grayscale image using the add_image API.
.. tags:: visualization-basic
"""
import numpy as np
from skimage import data
import napari
# simulating a grayscale image here for testing contrast limits adjustments
image = data.astronaut().mean(-1) * 100 + 100
image += np.random.rand(*image.shape) * 3000
viewer = napari.view_image(image.astype(np.uint16))
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/add_image.py 0000664 0000000 0000000 00000000440 14370413656 0017301 0 ustar 00root root 0000000 0000000 """
Add image
=========
Display one image using the :func:`view_image` API.
.. tags:: visualization-basic
"""
from skimage import data
import napari
# create the viewer with an image
viewer = napari.view_image(data.astronaut(), rgb=True)
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/add_image_transformed.py 0000664 0000000 0000000 00000000556 14370413656 0021715 0 ustar 00root root 0000000 0000000 """
Add image transformed
=====================
Display one image and transform it using the :func:`view_image` API.
.. tags:: visualization-basic
"""
from skimage import data
import napari
# create the viewer with an image and transform (rotate) it
viewer = napari.view_image(data.astronaut(), rgb=True, rotate=45)
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/add_labels.py 0000664 0000000 0000000 00000001607 14370413656 0017467 0 ustar 00root root 0000000 0000000 """
Add labels
==========
Display a labels layer on top of an image layer using the ``add_labels`` and
``add_image`` APIs
.. tags:: layers, visualization-basic
"""
from skimage import data
from skimage.filters import threshold_otsu
from skimage.measure import label
from skimage.morphology import closing, remove_small_objects, square
from skimage.segmentation import clear_border
import napari
image = data.coins()[50:-50, 50:-50]
# apply threshold
thresh = threshold_otsu(image)
bw = closing(image > thresh, square(4))
# remove artifacts connected to image border
cleared = remove_small_objects(clear_border(bw), 20)
# label image regions
label_image = label(cleared)
# initialise viewer with coins image
viewer = napari.view_image(image, name='coins', rgb=False)
# add the labels
label_layer = viewer.add_labels(label_image, name='segmentation')
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/add_labels_with_features.py 0000664 0000000 0000000 00000002755 14370413656 0022425 0 ustar 00root root 0000000 0000000 """
Add labels with features
========================
Display a labels layer with various features
.. tags:: layers, analysis
"""
import numpy as np
from skimage import data
from skimage.filters import threshold_otsu
from skimage.measure import label
from skimage.morphology import closing, remove_small_objects, square
from skimage.segmentation import clear_border
import napari
image = data.coins()[50:-50, 50:-50]
# apply threshold
thresh = threshold_otsu(image)
bw = closing(image > thresh, square(4))
# remove artifacts connected to image border
cleared = remove_small_objects(clear_border(bw), 20)
# label image regions
label_image = label(cleared)
# initialise viewer with coins image
viewer = napari.view_image(image, name='coins', rgb=False)
# get the size of each coin (first element is background area)
label_areas = np.bincount(label_image.ravel())[1:]
# split coins into small or large
size_range = max(label_areas) - min(label_areas)
small_threshold = min(label_areas) + (size_range / 2)
coin_sizes = np.where(label_areas > small_threshold, 'large', 'small')
label_features = {
'row': ['none']
+ ['top'] * 4
+ ['bottom'] * 4, # background is row: none
'size': ['none'] + list(coin_sizes), # background is size: none
}
color = {1: 'white', 2: 'blue', 3: 'green', 4: 'red', 5: 'yellow'}
# add the labels
label_layer = viewer.add_labels(
label_image,
name='segmentation',
features=label_features,
color=color,
)
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/add_multiscale_image.py 0000664 0000000 0000000 00000001122 14370413656 0021521 0 ustar 00root root 0000000 0000000 """
Add multiscale image
====================
Displays a multiscale image
.. tags:: visualization-advanced
"""
import numpy as np
from skimage import data
from skimage.transform import pyramid_gaussian
import napari
# create multiscale from astronaut image
base = np.tile(data.astronaut(), (8, 8, 1))
multiscale = list(
pyramid_gaussian(base, downscale=2, max_layer=4, multichannel=True)
)
print('multiscale level shapes: ', [p.shape[:2] for p in multiscale])
# add image multiscale
viewer = napari.view_image(multiscale, multiscale=True)
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/add_points.py 0000664 0000000 0000000 00000002242 14370413656 0017535 0 ustar 00root root 0000000 0000000 """
Add points
==========
Display a points layer on top of an image layer using the ``add_points`` and
``add_image`` APIs
.. tags:: visualization-basic
"""
import numpy as np
from skimage import data
from skimage.color import rgb2gray
import napari
# add the image
viewer = napari.view_image(rgb2gray(data.astronaut()))
# add the points
points = np.array([[100, 100], [200, 200], [333, 111]])
size = np.array([10, 20, 20])
viewer.add_points(points, size=size)
# unselect the image layer
viewer.layers.selection.discard(viewer.layers[0])
# adjust some of the points layer attributes
layer = viewer.layers[1]
# change the layer name
layer.name = 'points'
# change the layer visibility
layer.visible = False
layer.visible = True
# select the layer
viewer.layers.selection.add(layer)
# deselect the layer
viewer.layers.selection.remove(layer)
# or: viewer.layers.selection.discard(layer)
# change the layer opacity
layer.opacity = 0.9
# change the layer point symbol using an alias
layer.symbol = '+'
# change the layer point out_of_slice_display status
layer.out_of_slice_display = True
# change the layer mode
layer.mode = 'add'
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/add_points_on_nD_shapes.py 0000664 0000000 0000000 00000003442 14370413656 0022220 0 ustar 00root root 0000000 0000000 """
Add points on nD shapes
=======================
Add points on nD shapes in 3D using a mouse callback
.. tags:: visualization-nD
"""
import numpy as np
import napari
# Create rectangles in 4D
data = [
[
[0, 50, 75, 75],
[0, 50, 125, 75],
[0, 100, 125, 125],
[0, 100, 75, 125]
],
[
[0, 10, 75, 75],
[0, 10, 125, 75],
[0, 40, 125, 125],
[0, 40, 75, 125]
],
[
[1, 100, 75, 75],
[1, 100, 125, 75],
[1, 50, 125, 125],
[1, 50, 75, 125]
]
]
shapes_data = np.array(data)
# add an empty 4d points layer
viewer = napari.view_points(ndim=4, size=3)
points_layer = viewer.layers[0]
# add the shapes layer to the viewer
features = {'index': [0, 1, 2]}
for shape_type, mult in {('ellipse', 1), ('rectangle', -1)}:
shapes_layer = viewer.add_shapes(
shapes_data * mult,
face_color=['magenta', 'green', 'blue'],
edge_color='white',
blending='additive',
features=features,
text='index',
shape_type=shape_type,
)
@shapes_layer.mouse_drag_callbacks.append
def on_click(layer, event):
shape_index, intersection_point = layer.get_index_and_intersection(
event.position, event.view_direction, event.dims_displayed
)
if (shape_index is not None) and (intersection_point is not None):
points_layer.add(intersection_point)
for d in data:
viewer.add_points(np.array(d))
# set the viewer to 3D rendering mode with the first two rectangles in view
viewer.dims.ndisplay = 3
viewer.dims.set_point(axis=0, value=0)
viewer.camera.angles = (70, 30, 150)
viewer.camera.zoom = 2.5
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/add_points_with_features.py 0000664 0000000 0000000 00000003665 14370413656 0022500 0 ustar 00root root 0000000 0000000 """
Add points with features
========================
Display a points layer on top of an image layer using the ``add_points`` and
``add_image`` APIs
.. tags:: visualization-basic
"""
import numpy as np
from skimage import data
from skimage.color import rgb2gray
import napari
# add the image
viewer = napari.view_image(rgb2gray(data.astronaut()))
# add the points
points = np.array([[100, 100], [200, 200], [333, 111]])
# create features for each point
features = {
'confidence': np.array([1, 0.5, 0]),
'good_point': np.array([True, False, False])
}
# define the color cycle for the face_color annotation
face_color_cycle = ['blue', 'green']
# create a points layer where the face_color is set by the good_point feature
# and the edge_color is set via a color map (grayscale) on the confidence
# feature.
points_layer = viewer.add_points(
points,
features=features,
size=20,
edge_width=7,
edge_width_is_relative=False,
edge_color='confidence',
edge_colormap='gray',
face_color='good_point',
face_color_cycle=face_color_cycle
)
# set the edge_color mode to colormap
points_layer.edge_color_mode = 'colormap'
# bind a function to toggle the good_point annotation of the selected points
@viewer.bind_key('t')
def toggle_point_annotation(viewer):
selected_points = list(points_layer.selected_data)
if len(selected_points) > 0:
good_point = points_layer.features['good_point']
good_point[selected_points] = ~good_point[selected_points]
points_layer.features['good_point'] = good_point
# we need to manually refresh since we did not use the Points.features
# setter to avoid changing the color map if all points get toggled to
# the same class, we set update_colors=False (only re-colors the point
# using the previously-determined color mapping).
points_layer.refresh_colors(update_color_mapping=False)
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/add_points_with_multicolor_text.py 0000664 0000000 0000000 00000002546 14370413656 0024114 0 ustar 00root root 0000000 0000000 """
Add points with multicolor text
===============================
Display a points layer on top of an image layer with text using
multiple face colors mapped from features for the points and text.
.. tags:: visualization-basic
"""
import numpy as np
import napari
# add the image with three points
viewer = napari.view_image(np.zeros((400, 400)))
points = np.array([[100, 100], [200, 300], [333, 111]])
# create features for each point
features = {
'confidence': np.array([1, 0.5, 0]),
'good_point': np.array([True, False, False]),
}
# define the color cycle for the points face and text colors
color_cycle = ['blue', 'green']
text = {
'string': 'Confidence is {confidence:.2f}',
'size': 20,
'color': {'feature': 'good_point', 'colormap': color_cycle},
'translation': np.array([-30, 0]),
}
# create a points layer where the face_color is set by the good_point feature
# and the edge_color is set via a color map (grayscale) on the confidence
# feature
points_layer = viewer.add_points(
points,
features=features,
text=text,
size=20,
edge_width=7,
edge_width_is_relative=False,
edge_color='confidence',
edge_colormap='gray',
face_color='good_point',
face_color_cycle=color_cycle,
)
# set the edge_color mode to colormap
points_layer.edge_color_mode = 'colormap'
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/add_points_with_text.py 0000664 0000000 0000000 00000002406 14370413656 0021636 0 ustar 00root root 0000000 0000000 """
Add points with text
====================
Display a points layer on top of an image layer using the ``add_points`` and
``add_image`` APIs
.. tags:: visualization-basic
"""
import numpy as np
import napari
# add the image
viewer = napari.view_image(np.zeros((400, 400)))
# add the points
points = np.array([[100, 100], [200, 300], [333, 111]])
# create features for each point
features = {
'confidence': np.array([1, 0.5, 0]),
'good_point': np.array([True, False, False]),
}
# define the color cycle for the face_color annotation
face_color_cycle = ['blue', 'green']
text = {
'string': 'Confidence is {confidence:.2f}',
'size': 20,
'color': 'green',
'translation': np.array([-30, 0]),
}
# create a points layer where the face_color is set by the good_point feature
# and the edge_color is set via a color map (grayscale) on the confidence
# feature.
points_layer = viewer.add_points(
points,
features=features,
text=text,
size=20,
edge_width=7,
edge_width_is_relative=False,
edge_color='confidence',
edge_colormap='gray',
face_color='good_point',
face_color_cycle=face_color_cycle,
)
# set the edge_color mode to colormap
points_layer.edge_color_mode = 'colormap'
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/add_shapes.py 0000664 0000000 0000000 00000004254 14370413656 0017511 0 ustar 00root root 0000000 0000000 """
Add shapes
==========
Display one shapes layer on top of one image layer using the ``add_shapes`` and
``add_image`` APIs. When the window is closed it will print the coordinates of
your shapes.
.. tags:: visualization-basic
"""
import numpy as np
from skimage import data
import napari
# add the image
viewer = napari.view_image(data.camera(), name='photographer')
# create a list of polygons
polygons = [
np.array([[11, 13], [111, 113], [22, 246]]),
np.array(
[
[505, 60],
[402, 71],
[383, 42],
[251, 95],
[212, 59],
[131, 137],
[126, 187],
[191, 204],
[171, 248],
[211, 260],
[273, 243],
[264, 225],
[430, 173],
[512, 160],
]
),
np.array(
[
[310, 382],
[229, 381],
[209, 401],
[221, 411],
[258, 411],
[300, 412],
[306, 435],
[268, 434],
[265, 454],
[298, 461],
[307, 461],
[307, 507],
[349, 510],
[352, 369],
[330, 366],
[330, 366],
]
),
]
# add polygons
layer = viewer.add_shapes(
polygons,
shape_type='polygon',
edge_width=1,
edge_color='coral',
face_color='royalblue',
name='shapes',
)
# shapes of each type can also be added via their respective add_ method
# e.g. for the polygons above:
# layer = viewer.add_shapes(name='shapes') # create empty layer
# layer.add_polygons(
# polygons,
# edge_width=1,
# edge_color='coral',
# face_color='royalblue',
# )
# change some attributes of the layer
layer.selected_data = set(range(layer.nshapes))
layer.current_edge_width = 5
layer.selected_data = set()
# add an ellipse to the layer
ellipse = np.array([[59, 222], [110, 289], [170, 243], [119, 176]])
layer.add(
ellipse,
shape_type='ellipse',
edge_width=5,
edge_color='coral',
face_color='purple',
)
# To save layers to svg:
# viewer.layers.save('viewer.svg', plugin='svg')
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/add_shapes_with_features.py 0000664 0000000 0000000 00000003671 14370413656 0022444 0 ustar 00root root 0000000 0000000 """
Add shapes with features
========================
Display one shapes layer on top of one image layer using the ``add_shapes`` and
``add_image`` APIs. When the window is closed it will print the coordinates of
your shapes.
.. tags:: visualization-basic
"""
import numpy as np
from skimage import data
import napari
# add the image
viewer = napari.view_image(data.camera(), name='photographer')
# create a list of polygons
polygons = [
np.array([[11, 13], [111, 113], [22, 246]]),
np.array(
[
[505, 60],
[402, 71],
[383, 42],
[251, 95],
[212, 59],
[131, 137],
[126, 187],
[191, 204],
[171, 248],
[211, 260],
[273, 243],
[264, 225],
[430, 173],
[512, 160],
]
),
np.array(
[
[310, 382],
[229, 381],
[209, 401],
[221, 411],
[258, 411],
[300, 412],
[306, 435],
[268, 434],
[265, 454],
[298, 461],
[307, 461],
[307, 507],
[349, 510],
[352, 369],
[330, 366],
[330, 366],
]
),
]
# create features
features = {
'likelihood': [0.2, 0.5, 1],
'class': ['sky', 'person', 'building'],
}
face_color_cycle = ['blue', 'magenta', 'green']
# add polygons
layer = viewer.add_shapes(
polygons,
features=features,
shape_type='polygon',
edge_width=1,
edge_color='likelihood',
edge_colormap='gray',
face_color='class',
face_color_cycle=face_color_cycle,
name='shapes',
)
# change some attributes of the layer
layer.selected_data = set(range(layer.nshapes))
layer.current_edge_width = 5
layer.selected_data = set()
# To save layers to svg:
# viewer.layers.save('viewer.svg', plugin='svg')
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/add_shapes_with_text.py 0000664 0000000 0000000 00000002554 14370413656 0021611 0 ustar 00root root 0000000 0000000 """
Add shapes with text
====================
Display one shapes layer on top of one image layer using the ``add_shapes`` and
``add_image`` APIs, with text labels generated from the per-shape features.
.. tags:: visualization-basic
"""
import numpy as np
from skimage import data
import napari
# add the image
viewer = napari.view_image(data.camera(), name='photographer')
# create a list of polygons
polygons = [
np.array([[225, 146], [283, 146], [283, 211], [225, 211]]),
np.array([[67, 182], [167, 182], [167, 268], [67, 268]]),
np.array([[111, 336], [220, 336], [220, 240], [111, 240]]),
]
# create features
features = {
'likelihood': [21.23423, 51.2315, 100],
'class': ['hand', 'face', 'camera'],
}
edge_color_cycle = ['blue', 'magenta', 'green']
text = {
'string': '{class}: {likelihood:0.1f}%',
'anchor': 'upper_left',
'translation': [-5, 0],
'size': 8,
'color': 'green',
}
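# Explanatory note (not part of the original example): for the first shape above,
# whose features are class 'hand' and likelihood 21.23423, the format string
# renders as 'hand: 21.2%'.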
# add polygons
shapes_layer = viewer.add_shapes(
polygons,
features=features,
shape_type='polygon',
edge_width=3,
edge_color='class',
edge_color_cycle=edge_color_cycle,
face_color='transparent',
text=text,
name='shapes',
)
# change some attributes of the layer
shapes_layer.opacity = 1
# To save layers to svg:
# viewer.layers.save('viewer.svg', plugin='svg')
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/add_surface_2D.py 0000664 0000000 0000000 00000000570 14370413656 0020200 0 ustar 00root root 0000000 0000000 """
Add surface 2D
==============
Display a 2D surface
.. tags:: visualization-basic
"""
import numpy as np
import napari
data = np.array([[0, 0], [0, 20], [10, 0], [10, 10]])
faces = np.array([[0, 1, 2], [1, 2, 3]])
values = np.linspace(0, 1, len(data))
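# Explanatory note (not part of the original example): in the (data, faces, values)
# tuple, `data` holds the vertex coordinates, `faces` indexes triangles into those
# vertices, and `values` are per-vertex scalars mapped through the layer colormap.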
# add the surface
viewer = napari.view_surface((data, faces, values))
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/add_vectors.py 0000664 0000000 0000000 00000002005 14370413656 0017703 0 ustar 00root root 0000000 0000000 """
Add vectors
===========
This example generates a set of vectors on top of an image.
Vector data is an array of shape (N, 2, 2), where for each vector
* ``pos[i, 0]`` is the center point
* ``pos[i, 1]`` is the projection (direction and length) at that center
.. tags:: visualization-basic
"""
import numpy as np
from skimage import data
import napari
# create the viewer and window
viewer = napari.Viewer()
layer = viewer.add_image(data.camera(), name='photographer')
# sample vector coord-like data
n = 200
pos = np.zeros((n, 2, 2), dtype=np.float32)
phi_space = np.linspace(0, 4 * np.pi, n)
radius_space = np.linspace(0, 100, n)
# assign x-y position
pos[:, 0, 0] = radius_space * np.cos(phi_space) + 300
pos[:, 0, 1] = radius_space * np.sin(phi_space) + 256
# assign x-y projection
pos[:, 1, 0] = 2 * radius_space * np.cos(phi_space)
pos[:, 1, 1] = 2 * radius_space * np.sin(phi_space)
# add the vectors
layer = viewer.add_vectors(pos, edge_width=3)
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/add_vectors_color_by_angle.py 0000664 0000000 0000000 00000002523 14370413656 0022746 0 ustar 00root root 0000000 0000000 """
Add vectors color by angle
==========================
This example generates a set of vectors in a spiral pattern.
The color of the vectors is mapped to their 'angle' feature.
.. tags:: visualization-advanced
"""
import numpy as np
from skimage import data
import napari
# create the viewer and window
viewer = napari.Viewer()
layer = viewer.add_image(data.camera(), name='photographer')
# sample vector coord-like data
n = 300
pos = np.zeros((n, 2, 2), dtype=np.float32)
phi_space = np.linspace(0, 4 * np.pi, n)
radius_space = np.linspace(0, 100, n)
# assign x-y position
pos[:, 0, 0] = radius_space * np.cos(phi_space) + 300
pos[:, 0, 1] = radius_space * np.sin(phi_space) + 256
# assign x-y projection
pos[:, 1, 0] = 2 * radius_space * np.cos(phi_space)
pos[:, 1, 1] = 2 * radius_space * np.sin(phi_space)
# make the angle feature, range 0-2pi
angle = np.mod(phi_space, 2 * np.pi)
# create a feature that is true for all angles > pi
pos_angle = angle > np.pi
# create the features dictionary.
features = {
'angle': angle,
'pos_angle': pos_angle,
}
# add the vectors
layer = viewer.add_vectors(
pos,
edge_width=3,
features=features,
edge_color='angle',
edge_colormap='husl',
name='vectors'
)
# set the edge color mode to colormap
layer.edge_color_mode = 'colormap'
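# An alternative worth noting (illustrative, not part of the original example):
# the unused boolean 'pos_angle' feature above could instead drive a two-color
# cycle rather than a colormap, e.g.:
# layer.edge_color_cycle = ['red', 'blue']
# layer.edge_color = 'pos_angle'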
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/add_vectors_image.py 0000664 0000000 0000000 00000002112 14370413656 0021044 0 ustar 00root root 0000000 0000000 """
Add vectors image
=================
This example generates an image-like grid of vectors.
Vector data is an array of shape (N, M, 2).
Each vector is defined by an (x-proj, y-proj) element, where
* x-proj and y-proj are the vector projections at each center
* each vector is centered on a pixel of the NxM grid
.. tags:: visualization-basic
"""
import numpy as np
import napari
# create the viewer and window
viewer = napari.Viewer()
n = 20
m = 40
image = 0.2 * np.random.random((n, m)) + 0.5
layer = viewer.add_image(image, contrast_limits=[0, 1], name='background')
# sample vector image-like data
# n x m grid of slanted lines
# random data on the open interval (-1, 1)
pos = np.zeros(shape=(n, m, 2), dtype=np.float32)
rand1 = 2 * (np.random.random_sample(n * m) - 0.5)
rand2 = 2 * (np.random.random_sample(n * m) - 0.5)
# assign projections for each vector
pos[:, :, 0] = rand1.reshape((n, m))
pos[:, :, 1] = rand2.reshape((n, m))
# add the vectors
vect = viewer.add_vectors(pos, edge_width=0.2, length=2.5)
print(image.shape, pos.shape)
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/affine_transforms.py 0000664 0000000 0000000 00000003356 14370413656 0021126 0 ustar 00root root 0000000 0000000 """
Affine transforms
=================
Display an image and its corners before and after an affine transform
.. tags:: visualization-advanced
"""
import numpy as np
import scipy.ndimage as ndi
import napari
# Create a random image
image = np.random.random((5, 5))
# Define an affine transform
affine = np.array([[1, -1, 4], [2, 3, 2], [0, 0, 1]])
# Define the corners of the image, including in homogeneous space
corners = np.array([[0, 0], [4, 0], [0, 4], [4, 4]])
corners_h = np.concatenate([corners, np.ones((4, 1))], axis=1)
viewer = napari.Viewer()
# Add the original image and its corners
viewer.add_image(image, name='background', colormap='red', opacity=.5)
viewer.add_points(corners_h[:, :-1], size=0.5, opacity=.5, face_color=[0.8, 0, 0, 0.8], name='bg corners')
# Add another copy of the image, now with a transform, and add its transformed corners
viewer.add_image(image, colormap='blue', opacity=.5, name='moving', affine=affine)
viewer.add_points((corners_h @ affine.T)[:, :-1], size=0.5, opacity=.5, face_color=[0, 0, 0.8, 0.8], name='mv corners')
# Note how the transformed corner points remain at the corners of the transformed image
# Now add a regridded version of the image, transformed with scipy.ndimage.affine_transform.
# Note that we have to use the inverse of the affine, because scipy does ‘pull’ (or ‘backward’)
# resampling, transforming the output space back to the input to locate data, whereas napari
# applies the transform in the ‘push’ (or ‘forward’) direction, transforming input to output
# (see the small sanity check below).
scipy_affine = ndi.affine_transform(image, np.linalg.inv(affine), output_shape=(10, 25), order=5)
viewer.add_image(scipy_affine, colormap='green', opacity=.5, name='scipy')
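# Minimal sanity check of the push/pull relationship described above (an
# illustrative addition, not part of the original example): pushing a corner
# forward with the affine and pulling it back with the inverse should recover
# the original homogeneous coordinate.
pushed_corner = affine @ np.array([4, 4, 1])                          # napari-style 'push'
assert np.allclose(np.linalg.inv(affine) @ pushed_corner, [4, 4, 1])  # scipy-style 'pull'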
# Reset the view
viewer.reset_view()
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/annotate-2d.py 0000664 0000000 0000000 00000000755 14370413656 0017534 0 ustar 00root root 0000000 0000000 """
Annotate 2D
===========
Display one points layer on top of one image layer using the ``add_points`` and
``add_image`` APIs. When the window is closed, the coordinates of the clicked
points are printed.
.. tags:: analysis
"""
import numpy as np
from skimage import data
import napari
print("click to add points; close the window when finished.")
viewer = napari.view_image(data.astronaut(), rgb=True)
points = viewer.add_points(np.zeros((0, 2)))
points.mode = 'add'
if __name__ == '__main__':
napari.run()
print("you clicked on:")
print(points.data)
napari-0.5.0a1/examples/annotate_segmentation_with_text.py 0000664 0000000 0000000 00000006355 14370413656 0024107 0 ustar 00root root 0000000 0000000 """
Annotate segmentation with text
===============================
Perform a segmentation and annotate the results with
bounding boxes and text
.. tags:: analysis
"""
import numpy as np
from skimage import data
from skimage.filters import threshold_otsu
from skimage.measure import label, regionprops_table
from skimage.morphology import closing, remove_small_objects, square
from skimage.segmentation import clear_border
import napari
def segment(image):
"""Segment an image using an intensity threshold determined via
Otsu's method.
Parameters
----------
image : np.ndarray
The image to be segmented
Returns
-------
label_image : np.ndarray
The resulting image in which each detected object is labeled with a unique integer.
"""
# apply threshold
thresh = threshold_otsu(image)
bw = closing(image > thresh, square(4))
# remove artifacts connected to image border
cleared = remove_small_objects(clear_border(bw), 20)
# label image regions
label_image = label(cleared)
return label_image
def make_bbox(bbox_extents):
"""Get the coordinates of the corners of a
bounding box from the extents
Parameters
----------
bbox_extents : list (4xN)
List of the extents of the bounding boxes for each of the N regions.
Should be ordered: [min_row, min_column, max_row, max_column]
Returns
-------
bbox_rect : np.ndarray
The corners of the bounding box. Can be input directly into a
napari Shapes layer.
"""
minr = bbox_extents[0]
minc = bbox_extents[1]
maxr = bbox_extents[2]
maxc = bbox_extents[3]
bbox_rect = np.array(
[[minr, minc], [maxr, minc], [maxr, maxc], [minr, maxc]]
)
bbox_rect = np.moveaxis(bbox_rect, 2, 0)
return bbox_rect
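# Illustrative check of make_bbox (not part of the original script): a single
# region with extents [min_row, min_col, max_row, max_col] = [0, 0, 2, 3]
# yields corners [[0, 0], [2, 0], [2, 3], [0, 3]].
assert np.array_equal(
    make_bbox(np.array([[0], [0], [2], [3]]))[0],
    np.array([[0, 0], [2, 0], [2, 3], [0, 3]]),
)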
def circularity(perimeter, area):
"""Calculate the circularity of the region
Parameters
----------
perimeter : float
the perimeter of the region
area : float
the area of the region
Returns
-------
circularity : float
The circularity of the region as defined by 4*pi*area / perimeter^2
"""
circularity = 4 * np.pi * area / (perimeter ** 2)
return circularity
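# For reference (an illustrative addition, not part of the original example):
# a perfect circle has circularity 1, e.g. for radius r = 10,
# 4*pi*(100*pi) / (20*pi)**2 == 1.
assert np.isclose(circularity(2 * np.pi * 10, np.pi * 10 ** 2), 1.0)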
# load the image and segment it
image = data.coins()[50:-50, 50:-50]
label_image = segment(image)
# create the features dictionary
features = regionprops_table(
label_image, properties=('label', 'bbox', 'perimeter', 'area')
)
features['circularity'] = circularity(
features['perimeter'], features['area']
)
# create the bounding box rectangles
bbox_rects = make_bbox([features[f'bbox-{i}'] for i in range(4)])
# specify the display parameters for the text
text_parameters = {
'string': 'label: {label}\ncirc: {circularity:.2f}',
'size': 12,
'color': 'green',
'anchor': 'upper_left',
'translation': [-3, 0],
}
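# Explanatory note (not part of the original example): a region with label 1 and
# circularity 0.8132 would be annotated with the two-line text 'label: 1' / 'circ: 0.81'.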
# initialise viewer with coins image
viewer = napari.view_image(image, name='coins', rgb=False)
# add the labels
label_layer = viewer.add_labels(label_image, name='segmentation')
shapes_layer = viewer.add_shapes(
bbox_rects,
face_color='transparent',
edge_color='green',
features=features,
text=text_parameters,
name='bounding box',
)
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/bbox_annotator.py 0000664 0000000 0000000 00000007251 14370413656 0020435 0 ustar 00root root 0000000 0000000 """
bbox annotator
==============
.. tags:: gui
"""
import numpy as np
import pandas as pd
from magicgui.widgets import ComboBox, Container
from skimage import data
import napari
# set up the categorical annotation values and text display properties
box_annotations = ['person', 'sky', 'camera']
text_feature = 'box_label'
features = pd.DataFrame({
text_feature: pd.Series([], dtype=pd.CategoricalDtype(box_annotations))
})
text_color = 'green'
text_size = 20
# create the GUI for selecting the values
def create_label_menu(shapes_layer, label_feature, labels):
"""Create a label menu widget that can be added to the napari viewer dock
Parameters
----------
shapes_layer : napari.layers.Shapes
a napari shapes layer
label_feature : str
the name of the shapes feature to use for the displayed text
labels : List[str]
list of the possible text label values.
Returns
-------
label_widget : magicgui.widgets.Container
the container widget with the label combobox
"""
# Create the label selection menu
label_menu = ComboBox(label='text label', choices=labels)
label_widget = Container(widgets=[label_menu])
def update_label_menu():
"""This is a callback function that updates the label menu when
the default features of the Shapes layer change
"""
new_label = str(shapes_layer.feature_defaults[label_feature][0])
if new_label != label_menu.value:
label_menu.value = new_label
shapes_layer.events.feature_defaults.connect(update_label_menu)
def set_selected_features_to_default():
"""This is a callback that updates the feature values of the currently
selected shapes. This is a side-effect of the deprecated current_properties
setter, but does not occur when modifying feature_defaults."""
indices = list(shapes_layer.selected_data)
default_value = shapes_layer.feature_defaults[label_feature][0]
shapes_layer.features[label_feature][indices] = default_value
shapes_layer.events.features()
shapes_layer.events.feature_defaults.connect(set_selected_features_to_default)
shapes_layer.events.features.connect(shapes_layer.refresh_text)
def label_changed(value: str):
"""This is a callback that update the default features on the Shapes layer
when the label menu selection changes
"""
shapes_layer.feature_defaults[label_feature] = value
shapes_layer.events.feature_defaults()
label_menu.changed.connect(label_changed)
return label_widget
# create a stack with the camera image shifted in each slice
n_slices = 5
base_image = data.camera()
image = np.zeros((n_slices, base_image.shape[0], base_image.shape[1]), dtype=base_image.dtype)
for slice_idx in range(n_slices):
shift = 1 + 10 * slice_idx
image[slice_idx, ...] = np.pad(base_image, ((0, 0), (shift, 0)), mode='constant')[:, :-shift]
# create a viewer with a fake t+2D image
viewer = napari.view_image(image)
# create an empty shapes layer initialized with
# text set to display the box label
text_kwargs = {
'string': text_feature,
'size': text_size,
'color': text_color
}
shapes = viewer.add_shapes(
face_color='black',
features=features,
text=text_kwargs,
ndim=3
)
# create the label selection gui
label_widget = create_label_menu(
shapes_layer=shapes,
label_feature=text_feature,
labels=box_annotations
)
# add the label selection gui to the viewer as a dock widget
viewer.window.add_dock_widget(label_widget, area='right', name='label_widget')
# set the shapes layer mode to adding rectangles
shapes.mode = 'add_rectangle'
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/clipboard_.py 0000664 0000000 0000000 00000002326 14370413656 0017512 0 ustar 00root root 0000000 0000000 """
Clipboard
=========
Copy a screenshot of the canvas or of the whole viewer to the clipboard.
.. tags:: gui
"""
from qtpy.QtWidgets import QPushButton, QVBoxLayout, QWidget
from skimage import data
import napari
# create the viewer with an image
viewer = napari.view_image(data.moon())
class Grabber(QWidget):
def __init__(self) -> None:
super().__init__()
self.copy_canvas_btn = QPushButton("Copy Canvas to Clipboard", self)
self.copy_canvas_btn.setToolTip("Copy screenshot of the canvas to clipboard.")
self.copy_viewer_btn = QPushButton("Copy Viewer to Clipboard", self)
self.copy_viewer_btn.setToolTip("Copy screenshot of the entire viewer to clipboard.")
layout = QVBoxLayout(self)
layout.addWidget(self.copy_canvas_btn)
layout.addWidget(self.copy_viewer_btn)
def create_grabber_widget():
"""Create widget"""
widget = Grabber()
# connect buttons
widget.copy_canvas_btn.clicked.connect(lambda: viewer.window.qt_viewer.clipboard())
widget.copy_viewer_btn.clicked.connect(lambda: viewer.window.clipboard())
return widget
widget = create_grabber_widget()
viewer.window.add_dock_widget(widget)
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/clipping_planes_interactive_.py 0000664 0000000 0000000 00000015264 14370413656 0023324 0 ustar 00root root 0000000 0000000 """
Clipping planes interactive
===========================
Display a 3D image (plus labels) with a clipping plane and interactive controls
for moving the plane
.. tags:: experimental
"""
import numpy as np
from scipy import ndimage
from skimage import data
from vispy.geometry import create_sphere
import napari
viewer = napari.Viewer(ndisplay=3)
# VOLUME and LABELS
blobs = data.binary_blobs(
length=64, volume_fraction=0.1, n_dim=3
).astype(float)
labeled = ndimage.label(blobs)[0]
plane_parameters = {
'position': (32, 32, 32),
'normal': (1, 1, 1),
'enabled': True
}
volume_layer = viewer.add_image(
blobs, rendering='mip', name='volume',
experimental_clipping_planes=[plane_parameters],
)
labels_layer = viewer.add_labels(
labeled, name='labels', blending='translucent',
experimental_clipping_planes=[plane_parameters],
)
# POINTS
points_layer = viewer.add_points(
np.random.rand(20, 3) * 64, size=5,
experimental_clipping_planes=[plane_parameters],
)
# SPHERE
mesh = create_sphere(method='ico')
sphere_vert = mesh.get_vertices() * 20
sphere_vert += 32
surface_layer = viewer.add_surface(
(sphere_vert, mesh.get_faces()),
experimental_clipping_planes=[plane_parameters],
)
# SHAPES
shapes_data = np.random.rand(3, 4, 3) * 64
shapes_layer = viewer.add_shapes(
shapes_data,
face_color=['magenta', 'green', 'blue'],
experimental_clipping_planes=[plane_parameters],
)
# VECTORS
vectors = np.zeros((20, 2, 3))
vectors[:, 0] = 32
vectors[:, 1] = (np.random.rand(20, 3) - 0.5) * 32
vectors_layer = viewer.add_vectors(
vectors,
experimental_clipping_planes=[plane_parameters],
)
def point_in_bounding_box(point, bounding_box):
    """Return True if `point` lies inside the axis-aligned `bounding_box`,
    given as a (min_corner, max_corner) pair."""
    return bool(np.all(point > bounding_box[0]) and np.all(point < bounding_box[1]))
@viewer.mouse_drag_callbacks.append
def shift_plane_along_normal(viewer, event):
"""Shift a plane along its normal vector on mouse drag.
This callback will shift a plane along its normal vector when the plane is
clicked and dragged. The general strategy is to
1) find both the plane normal vector and the mouse drag vector in canvas
coordinates
2) calculate how far to move the plane in canvas coordinates, this is done
by projecting the mouse drag vector onto the (normalised) plane normal
vector
3) transform this drag distance (canvas coordinates) into data coordinates
4) update the plane position
It will also add a point to the points layer for a 'click-not-drag' event.
"""
# get layers from viewer
volume_layer = viewer.layers['volume']
# Calculate intersection of click with data bounding box
near_point, far_point = volume_layer.get_ray_intersections(
event.position,
event.view_direction,
event.dims_displayed,
)
# Calculate intersection of click with plane through data
intersection = volume_layer.experimental_clipping_planes[0].intersect_with_line(
line_position=near_point, line_direction=event.view_direction
)
# Check if click was on plane by checking if intersection occurs within
# data bounding box. If so, exit early.
if not point_in_bounding_box(intersection, volume_layer.extent.data):
return
# Get plane parameters in vispy coordinates (zyx -> xyz)
plane_normal_data_vispy = np.array(volume_layer.experimental_clipping_planes[0].normal)[[2, 1, 0]]
plane_position_data_vispy = np.array(volume_layer.experimental_clipping_planes[0].position)[[2, 1, 0]]
# Get transform which maps from data (vispy) to canvas
# note that we're using a private attribute here, which may not be present in future napari versions
visual2canvas = viewer.window._qt_viewer.layer_to_visual[volume_layer].node.get_transform(
map_from="visual", map_to="canvas"
)
# Find start and end positions of plane normal in canvas coordinates
plane_normal_start_canvas = visual2canvas.map(plane_position_data_vispy)
plane_normal_end_canvas = visual2canvas.map(plane_position_data_vispy + plane_normal_data_vispy)
# Calculate plane normal vector in canvas coordinates
plane_normal_canv = (plane_normal_end_canvas - plane_normal_start_canvas)[[0, 1]]
plane_normal_canv_normalised = (
plane_normal_canv / np.linalg.norm(plane_normal_canv)
)
# Disable interactivity during plane drag
volume_layer.interactive = False
labels_layer.interactive = False
points_layer.interactive = False
surface_layer.interactive = False
shapes_layer.interactive = False
vectors_layer.interactive = False
# Store original plane position and start position in canvas coordinates
original_plane_position = volume_layer.experimental_clipping_planes[0].position
start_position_canv = event.pos
yield
while event.type == "mouse_move":
# Get end position in canvas coordinates
end_position_canv = event.pos
# Calculate drag vector in canvas coordinates
drag_vector_canv = end_position_canv - start_position_canv
# Project the drag vector onto the plane normal vector
# (in canvas coordinates)
drag_projection_on_plane_normal = np.dot(
drag_vector_canv, plane_normal_canv_normalised
)
# Update position of plane according to drag vector
# only update if plane position is within data bounding box
drag_distance_data = drag_projection_on_plane_normal / np.linalg.norm(plane_normal_canv)
updated_position = original_plane_position + drag_distance_data * np.array(
volume_layer.experimental_clipping_planes[0].normal)
if point_in_bounding_box(updated_position, volume_layer.extent.data):
volume_layer.experimental_clipping_planes[0].position = updated_position
labels_layer.experimental_clipping_planes[0].position = updated_position
points_layer.experimental_clipping_planes[0].position = updated_position
surface_layer.experimental_clipping_planes[0].position = updated_position
shapes_layer.experimental_clipping_planes[0].position = updated_position
vectors_layer.experimental_clipping_planes[0].position = updated_position
yield
# Re-enable
volume_layer.interactive = True
labels_layer.interactive = True
points_layer.interactive = True
surface_layer.interactive = True
shapes_layer.interactive = True
vectors_layer.interactive = True
viewer.axes.visible = True
viewer.camera.angles = (45, 45, 45)
viewer.camera.zoom = 5
viewer.text_overlay.update(dict(
text='Drag the clipping plane surface to move it along its normal.',
font_size=20,
visible=True,
))
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/concentric-spheres.py 0000664 0000000 0000000 00000000675 14370413656 0021217 0 ustar 00root root 0000000 0000000 """
Concentric spheres
==================
Display concentric spheres in 3D.
.. tags:: visualization-nD
"""
import numpy as np
from skimage import morphology
import napari
b0 = morphology.ball(5)
b1 = morphology.ball(10)
b0p = np.pad(b0, 5)
viewer = napari.Viewer(ndisplay=3)
# viewer.add_labels(b0)
viewer.add_labels(b0p)
viewer.add_labels(b1 * 2)
viewer.add_points([[10, 10, 10]], size=1)
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/cursor_position.py 0000664 0000000 0000000 00000001201 14370413656 0020644 0 ustar 00root root 0000000 0000000 """
Cursor position
===============
Add small data to examine cursor positions
.. tags:: interactivity
"""
import numpy as np
import napari
viewer = napari.Viewer()
image = np.array([[1, 0, 0, 1],
[0, 0, 1, 1],
[1, 0, 3, 0],
[0, 2, 0, 0]], dtype=int)
viewer.add_labels(image)
points = np.array([[0, 0], [2, 0], [1, 3]])
viewer.add_points(points, size=0.25)
rect = np.array([[0, 0], [3, 1]])
viewer.add_shapes(rect, shape_type='rectangle', edge_width=0.1)
vect = np.array([[[3, 2], [-1, 1]]])
viewer.add_vectors(vect, edge_width=0.1)
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/cursor_ray.py 0000664 0000000 0000000 00000003324 14370413656 0017603 0 ustar 00root root 0000000 0000000 """
Cursor ray
==========
Depict a ray through a layer in 3D to demonstrate interactive 3D functionality
.. tags:: interactivity
"""
import numpy as np
import napari
sidelength_data = 64
n_points = 10
# data to depict an empty volume, its bounding box and points along a ray
# through the volume
volume = np.zeros(shape=(sidelength_data, sidelength_data, sidelength_data))
bounding_box = np.array(
[
[0, 0, 0],
[1, 0, 0],
[0, 1, 0],
[1, 1, 0],
[0, 0, 1],
[1, 0, 1],
[0, 1, 1],
[1, 1, 1],
]
) * sidelength_data
points = np.zeros(shape=(n_points, 3))
# point sizes
point_sizes = np.linspace(0.5, 2, n_points, endpoint=True)
# point colors
green = [0, 1, 0, 1]
magenta = [1, 0, 1, 1]
point_colors = np.linspace(green, magenta, n_points, endpoint=True)
# create viewer and add layers for each piece of data
viewer = napari.Viewer(ndisplay=3)
bounding_box_layer = viewer.add_points(
bounding_box, face_color='cornflowerblue', name='bounding box'
)
ray_layer = viewer.add_points(
points, face_color=point_colors, size=point_sizes, name='cursor ray'
)
volume_layer = viewer.add_image(volume, blending='additive')
# callback function, called on mouse click when volume layer is active
@volume_layer.mouse_drag_callbacks.append
def on_click(layer, event):
near_point, far_point = layer.get_ray_intersections(
event.position,
event.view_direction,
event.dims_displayed
)
if (near_point is not None) and (far_point is not None):
ray_points = np.linspace(near_point, far_point, n_points, endpoint=True)
if ray_points.shape[1] != 0:
ray_layer.data = ray_points
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/custom_key_bindings.py 0000664 0000000 0000000 00000002043 14370413656 0021447 0 ustar 00root root 0000000 0000000 """
Custom key bindings
===================
Bind custom key callbacks to the viewer using the ``bind_key`` API
.. tags:: gui
"""
from skimage import data
import napari
blobs = data.binary_blobs(
length=128, blob_size_fraction=0.05, n_dim=2, volume_fraction=0.25
).astype(float)
viewer = napari.view_image(blobs, name='blobs')
@viewer.bind_key('a')
def accept_image(viewer):
msg = 'this is a good image'
viewer.status = msg
print(msg)
next(viewer)
@viewer.bind_key('r')
def reject_image(viewer):
msg = 'this is a bad image'
viewer.status = msg
print(msg)
next(viewer)
def next(viewer):
blobs = data.binary_blobs(
length=128, blob_size_fraction=0.05, n_dim=2, volume_fraction=0.25
).astype(float)
viewer.layers[0].data = blobs
@napari.Viewer.bind_key('w')
def hello(viewer):
# on press
viewer.status = 'hello world!'
yield
# on release
viewer.status = 'goodbye world :('
# change viewer title
viewer.title = 'quality control images'
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/custom_mouse_functions.py 0000664 0000000 0000000 00000004275 14370413656 0022233 0 ustar 00root root 0000000 0000000 """
Custom mouse functions
======================
Add custom mouse callbacks to the viewer and to a labels layer
.. tags:: gui
"""
import numpy as np
from scipy import ndimage as ndi
from skimage import data
from skimage.morphology import binary_dilation, binary_erosion
import napari
np.random.seed(1)
viewer = napari.Viewer()
blobs = data.binary_blobs(length=128, volume_fraction=0.1, n_dim=2)
labeled = ndi.label(blobs)[0]
labels_layer = viewer.add_labels(labeled, name='blob ID')
@viewer.mouse_drag_callbacks.append
def get_event(viewer, event):
print(event)
@viewer.mouse_drag_callbacks.append
def get_ndisplay(viewer, event):
if 'Alt' in event.modifiers:
print('viewer display ', viewer.dims.ndisplay)
@labels_layer.mouse_drag_callbacks.append
def get_connected_component_shape(layer, event):
data_coordinates = layer.world_to_data(event.position)
cords = np.round(data_coordinates).astype(int)
val = layer.get_value(data_coordinates)
if val is None:
return
if val != 0:
data = layer.data
binary = data == val
if 'Shift' in event.modifiers:
binary_new = binary_erosion(binary)
data[binary] = 0
else:
binary_new = binary_dilation(binary)
data[binary_new] = val
size = np.sum(binary_new)
layer.data = data
msg = (
f'clicked at {cords} on blob {val} which is now {size} pixels'
)
else:
msg = f'clicked at {cords} on background which is ignored'
print(msg)
# Handle click or drag events separately
@labels_layer.mouse_drag_callbacks.append
def click_drag(layer, event):
print('mouse down')
dragged = False
yield
# on move
while event.type == 'mouse_move':
print(event.position)
dragged = True
yield
# on release
if dragged:
print('drag end')
else:
print('clicked!')
# Handle click or drag events separately
@labels_layer.mouse_double_click_callbacks.append
def on_second_click_of_double_click(layer, event):
print('Second click of double_click', event.position)
print('note that a click event was also triggered', event.type)
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/dask_nD_image.py 0000664 0000000 0000000 00000001233 14370413656 0020115 0 ustar 00root root 0000000 0000000 """
Dask nD image
=============
Display a dask array
.. tags:: visualization-nD
"""
try:
from dask import array as da
except ModuleNotFoundError:
raise ModuleNotFoundError(
"""This example uses a dask array but dask is not
installed. To install try 'pip install dask'."""
) from None
import numpy as np
from skimage import data
import napari
blobs = da.stack(
[
data.binary_blobs(
length=128, blob_size_fraction=0.05, n_dim=3, volume_fraction=f
)
for f in np.linspace(0.05, 0.5, 10)
],
axis=0,
)
viewer = napari.view_image(blobs.astype(float))
if __name__ == '__main__':
napari.run()
napari-0.5.0a1/examples/dev/ 0000775 0000000 0000000 00000000000 14370413656 0015615 5 ustar 00root root 0000000 0000000 napari-0.5.0a1/examples/dev/demo_shape_creation.py 0000664 0000000 0000000 00000005424 14370413656 0022164 0 ustar 00root root 0000000 0000000 import argparse
from timeit import default_timer
import numpy as np
import napari
def create_sample_coords(n_polys=3000, n_vertices=32):
"""random circular polygons with given number of vertices"""
center = np.random.randint(0, 1000, (n_polys, 2))
radius = (
1000
/ np.sqrt(n_polys)
* np.random.uniform(0.9, 1.1, (n_polys, n_vertices))
)
phi = np.linspace(0, 2 * np.pi, n_vertices, endpoint=False)
rays = np.stack([np.sin(phi), np.cos(phi)], 1)
radius = radius.reshape((-1, n_vertices, 1))
rays = rays.reshape((1, -1, 2))
center = center.reshape((-1, 1, 2))
coords = center + radius * rays
return coords
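# Shape check (illustrative, not part of the original script): the returned array
# holds one row of `n_vertices` 2D points per polygon.
assert create_sample_coords(n_polys=4, n_vertices=8).shape == (4, 8, 2)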
def time_me(label, func):
# print(f'{label} start')
t = default_timer()
res = func()
t = default_timer() - t
print(f"{label}: {t:.4f} s")
return res
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="")
parser.add_argument(
"-n",
"--n_polys",
type=int,
default=5000,
help='number of polygons to show',
)
parser.add_argument(
"-t",
"--type",
type=str,
default="path",
choices=['path', 'path_concat', 'polygon', 'rectangle', 'ellipse'],
)
parser.add_argument(
"-c",
"--concat",
action="store_true",
help='concatenate all coordinates to a single mesh',
)
parser.add_argument(
"-v", "--view", action="store_true", help='show napari viewer'
)
parser.add_argument(
"--properties", action="store_true", help='add dummy shape properties'
)
args = parser.parse_args()
coords = create_sample_coords(args.n_polys)
if args.type == 'rectangle':
coords = coords[:, [4, 20]]
elif args.type == 'ellipse':
coords = coords[:, [0, 8, 16,22]]
elif args.type == 'path_concat':
args.type = 'path'
coords = coords.reshape((1, -1, 2))
print(f'number of polygons: {len(coords)}')
print(f'layer type: {args.type}')
print(f'properties: {args.properties}')
properties = {
'class': (['A', 'B', 'C', 'D'] * (len(coords) // 4 + 1))[
: len(coords)
],
}
color_cycle = ['blue', 'magenta', 'green']
kwargs = dict(
shape_type=args.type,
properties=properties if args.properties else None,
face_color='class' if args.properties else [1,1,1,1],
face_color_cycle=color_cycle,
edge_color='class' if args.properties else [1,1,1,1],
edge_color_cycle=color_cycle,
)
layer = time_me(
"time to create layer",
lambda: napari.layers.Shapes(coords, **kwargs),
)
if args.view:
# add the image
viewer = napari.Viewer()
viewer.add_layer(layer)
napari.run()
napari-0.5.0a1/examples/dev/grin.svg 0000664 0000000 0000000 00000001541 14370413656 0017276 0 ustar 00root root 0000000 0000000