pax_global_header00006660000000000000000000000064141267472120014520gustar00rootroot0000000000000052 comment=e1cd919f5f66abddc219b76926450240feaaed8f spectral-0.22.4/000077500000000000000000000000001412674721200134225ustar00rootroot00000000000000spectral-0.22.4/.github/000077500000000000000000000000001412674721200147625ustar00rootroot00000000000000spectral-0.22.4/.github/workflows/000077500000000000000000000000001412674721200170175ustar00rootroot00000000000000spectral-0.22.4/.github/workflows/python-package.yml000066400000000000000000000026531412674721200224620ustar00rootroot00000000000000# This workflow will install Python dependencies, run tests and lint with a variety of Python versions # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions name: Unit Tests on: push: branches: [ master ] pull_request: branches: [ master ] jobs: build: runs-on: ubuntu-latest strategy: fail-fast: false matrix: python-version: [2.7, 3.6, 3.7, 3.8, 3.9] steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | python -m pip install --upgrade pip python -m pip install flake8 numpy pytest if [ -f requirements.txt ]; then pip install -r requirements.txt; fi git clone https://github.com/spectralpython/sample-data.git ~/spectral_data - name: Lint with flake8 run: | # stop the build if there are Python syntax errors or undefined names flake8 . --count --select=E9,F63,F7,F822 --show-source --statistics # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - name: Run unit tests run: | SPECTRAL_DATA=~/spectral_data python -m spectral.tests.run spectral-0.22.4/.gitignore000066400000000000000000000001411412674721200154060ustar00rootroot00000000000000*.py[cod] dist/ spectral.egg-info/ build/ *# *~ TAGS .project .pydevproject spectral_test_files/ spectral-0.22.4/.travis.yml000066400000000000000000000006151412674721200155350ustar00rootroot00000000000000language: python sudo: false python: ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9"] matrix: include: - python: 2.6 dist: trusty - python: 3.3 dist: trusty install: - pip install numpy - python setup.py install before_script: - git clone https://github.com/spectralpython/sample-data.git ~/spectral_data script: SPECTRAL_DATA=~/spectral_data python -m spectral.tests.run spectral-0.22.4/LICENSE.txt000066400000000000000000000020561412674721200152500ustar00rootroot00000000000000Copyright © 2002 Thomas Boggs Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. spectral-0.22.4/MANIFEST.in000066400000000000000000000000551412674721200151600ustar00rootroot00000000000000recursive-include spectral *py include *.txt spectral-0.22.4/README.rst000066400000000000000000000056741412674721200151250ustar00rootroot00000000000000Spectral Python (SPy) --------------------- .. image:: https://github.com/spectralpython/spectral/actions/workflows/python-package.yml/badge.svg?branch=master :target: https://github.com/spectralpython/spectral/actions/workflows/python-package.yml .. image:: https://badges.gitter.im/spectralpython/spectral.svg :alt: Join the chat at https://gitter.im/spectralpython/spectral :target: https://gitter.im/spectralpython/spectral?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge .. image:: https://anaconda.org/conda-forge/spectral/badges/version.svg :target: https://anaconda.org/conda-forge/spectral .. image:: https://anaconda.org/conda-forge/spectral/badges/platforms.svg :target: https://anaconda.org/conda-forge/spectral .. image:: https://anaconda.org/conda-forge/spectral/badges/license.svg :target: https://anaconda.org/conda-forge/spectral .. image:: https://anaconda.org/conda-forge/spectral/badges/downloads.svg :target: https://anaconda.org/conda-forge/spectral .. image:: https://anaconda.org/conda-forge/spectral/badges/installer/conda.svg :target: https://conda.anaconda.org/conda-forge Spectral Python (SPy) is a pure Python module for processing hyperspectral image data (imaging spectroscopy data). It has functions for reading, displaying, manipulating, and classifying hyperspectral imagery. Full details about the package are on the `web site `_. Installation Instructions ========================= The latest release is always hosted on `PyPI `_, so if you have `pip` installed, you can install SPy from the command line with .. code:: pip install spectral Packaged distributions are also hosted at `PyPI `_ and `GitHub `_ so you can download and unpack the latest zip/tarball, then type .. code:: python setup.py install To install the latest development version, download or clone the git repository and install as above. No explicit installation is required so you can simply access (or symlink) the `spectral` module within the source tree. **Finally**, up-to-date guidance on how to install via the popular conda package and environment management system can be found at official `conda-forge documentation `_. Unit Tests ========== To run the suite of unit tests, you must have `numpy` installed and you must have the `sample data files `_ downloaded to the current directory (or one specified by the `SPECTRAL_DATA` environment variable). To run the unit tests, type .. code:: python -m spectral.tests.run Dependencies ============ Using SPy interactively with its visualization capabilities requires `IPython` and several other packages (depending on the features used). See the `web site `_ for details. 
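Quickstart
==========

For a quick check that the installation works, the following minimal sketch (assuming the sample file ``92AV3C.lan`` from the sample-data repository is in the current directory or in a directory listed in ``SPECTRAL_DATA``) opens an image and computes basic statistics:

.. code::

    import spectral as spy

    img = spy.open_image('92AV3C.lan')   # returns a SpyFile object
    print(img)                           # summary of the image data source

    arr = img.load()                     # read the full cube into memory
    stats = spy.calc_stats(arr)          # Gaussian statistics over all pixels
    print(stats.mean.shape, stats.cov.shape)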
spectral-0.22.4/VERSIONS.txt000066400000000000000000000634741412674721200154510ustar00rootroot00000000000000================================================================================ SPy 0.22.4 ================================================================================ Release date: 2021.10.05 Bug Fixes --------- * [#3] Using np.int8 for `spy_colors` caused invalid (negative) color values ================================================================================ SPy 0.22.3 ================================================================================ Release date: 2021.10.01 Bug Fixes --------- * [#127] Use `any` instead of `not` for sequence logic * [#123,#124] Fixed Matplotlib deprecation warnings * [#130] Fix deprecation warnings caused by np.int and np.float * Fixed several missing imports leftover from code refactoring ================================================================================ SPy 0.22.2 ================================================================================ Release date: 2021.03.31 Bug Fixes --------- * [#118] Avoid Matplotlib deprecation warning. ================================================================================ SPy 0.22.1 ================================================================================ Release date: 2020.08.02 Bug Fixes --------- * Spectral database unit test files were not included with package files ================================================================================ SPy 0.22 ================================================================================ Release date: 2020.08.01 New Features (thanks to @kormang) --------------------------------- * Continuum computation and removal * See functions `spectral_continuum`, `remove_continuum`, `continuum_points` * Support for USGS spectral library (`spectral.USGSDatabase`) * SMACC algorithm for endmember extraction (`spectral.algorithms.smacc`) Bug Fixes --------- * [Issue #113] `orthogonalize` was broken in release 0.21. ================================================================================ SPy 0.21 ================================================================================ Release date: 2020.04.26 Changes ------- * As of version 0.21, Spectral Python (SPy) is released under the MIT License. * Switched most console `print` statements to logging statements for finer control of module output. * Cleaned up a lot of code to put imports at module (file) level. Bug Fixes --------- * [Issue #99] matplotlib's "cmap" keyword now works again with `spy.imshow`. * [Issue #102] Creating a `SpectralLibrary` no longer fails when some optional band parameters are not provided. ================================================================================ SPy 0.20 ================================================================================ Release date: 2019.10.06 New Features ------------ * Support for ECOSTRESS spectral library (supercedes ASTER library). * Accept "mask" and "bg" keywords in `imshow` and `get_rgb` (affects color scaling). * Denoising methods added to `PrincipalComponents` (issue #91) added by @gemmaellen. * Added "raw" to list of checked ENVI data file extensions. Changes ------- * Increase speed of `spectral_angles` computation by ~30x using `einsum`. * Eliminate `tostring` deprecation warnings by migrating to `tobytes`. Bug Fixes --------- * Support parsing ENV bbl values as floats (issue #67). * Removed "scale" keyword to handle TransformedImage objects in `envi.save_image` (issue #83). 
================================================================================ SPy 0.19 ================================================================================ Release date: 2017.06.04 New Features ------------ * Rendering functions accept an "ignore" keyword to indicate a bad data value that will be ignored when calculating color histogram stretches. * Added `iterator_ij` to iterate over all pixel coordinates for a given image mask. * Added `expand_binary_mask_for_window` to expand an image mask to include all pixels within a fixed size window around each masked pixel. * Added support for bad band lists (bbl) in ENVI headers and suppress display of bad bands in spectral plots * Added optional support for non-lowercase ENVI header parameter names. Changes ------- * `principal_components` also accepts a `GaussianStats` object, which can avoid the need to recompute image/class statistics. * Added a `SpyException` base class for package-specific exceptions. * Added "raw" to the list of checked ENVI image data file extensions. Bug Fixes --------- * Fixed a bug that potentially produced incorrect results in `map_class_ids` (issue #53). * Removed unecessary import that cause python3 compatibility error in `aviris.open` (issue #54). * Removed `has_key` call breaking python3 compatibility (issue #56). ================================================================================ SPy 0.18 ================================================================================ Release date: 2016.06.18 Changes ------- * Improved handling of ENVI header files: * If "header offset" is missing, assume the offset is 0. * Stricter check for "ENVI" in the first line of the file. * [Issue #42] More-informative exceptions are raised when a problem is encountered while reading and ENVI header. * [Issue #46] No longer defaulting `band_quantity` to "Wavelength". Bug Fixes --------- * [Issue #38 and #39] The `tostring` method has been deprecated in newer versions of PIL/Pillow. Using `tobytes` now and falling back to `tostring` if it fails. * [Issue #40] An exception was raised when trying to get a pixel's row/col by CTRL-SHIFT-clicking in the ND window display. * [Issue #44] Matplotlib was being set to interactive mode even if no SPy windows were displayed. This would affect behavior of other code using Matplotlib. Interactive mode is now set only once the first display is requested. * [Issue #49] `GaussianClassifier` and `MahalanobisDistanceClassifier` method `classify_image` was failing when applied to an object that was not a numpy.ndarray (e.g., a `SpyFile` or `TransformedImage`). ================================================================================ SPy 0.17 ================================================================================ Release date: 2015.11.11 New Features ------------ * Functions `map_class_ids` and `map_classes` were added for mapping class indices between images. * `view_nd` now accepts custom axis labels. * `ImageArray` and `SpyFile` have `asarray` method that provides numpy array interface. Changes ------- * ENVI header parameter names are case insensitive (converted to lower-case after being read). * `ImageArray` objects have additional `SpyFile` methods/attributes and indexing behavior is now like `SpyFile`'s. * An exception is now raised when attempting to open or save an image in ENVI format with nonzero major or minor frame offsets (see issue #35). Bug Fixes --------- * [Issue #27] ImageView.__str__ failed if image did not have "bands" in metadata. 
================================================================================ SPy 0.16.2 ================================================================================ Release date: 2015.04.27 Bug_Fix release. Bug Fixes --------- * [Issue #19] Fixed handling of intereave keyword. * [Issue #20] envi.save_image fails when interleave keyword is provided for single-band images. * [Issue #21] offset keyword has not effect in envi.create_image. * [Issue #22] setup.py fails if numpy is not already installed. * [Issue #24] save_rgb fails when format keyword is given. * [Issue #25] view_cube fails in HypercubeWindow.load_textures. * [Issue #26] Too few colors in view_cube side faces. ================================================================================ SPy 0.16.1 ================================================================================ Release date: 2014.10.18 Bug_Fix release. Bug Fixes --------- * [Issue #18] Missing import statements for pixel iterators. ================================================================================ SPy 0.16.0 ================================================================================ Release date: 2014.10.18 New Features ------------ * Adaptive Coherence/Cosine Estimator (ACE) target detector * Pixel Purity Index (PPI) * Adjustable linear color stretches (based on data limits or cumulative histogram) * Ability to save ENVI Classification files (thanks to @ohspite) * `ImageView` class has `set_title` method Changes ------- * `imshow` image displays use a linear 2% color stretch by default (can override this in `spectral.settings`) * Limited Python 3 compatibility (all functions except `view_cube` and `view_nd`) supported * `get_rgb` handles bands with no variation (color set to min value) * Modified `view_nd` to support change in PyOpenGL API Bug Fixes --------- * [Issue #16] Incorrect handling of alternate file extensions in `envi.save_image` * [Issue #11] Incorrect handling of unsigned byte data type in ENVI files * [Issue #13] As of version 1.9, `numpy` no longer provides `oldnumeric` submodule ================================================================================ SPy 0.15.0 ================================================================================ Release date: 2014.06.04 New Features ------------ * Added Minimum Noise Fraction (`mnf`) algorithm (a.k.a., Noise-Adjusted Principal Components). An associated `noise_from_diffs` function enables estimation of image noise from a homogeneous region of the image. Changes ------- * When calling `envi.save_image`, assume an ndarray with two dimensions is a single-band image (i.e., don't require an explicit third dimension). * [Issue #9] All SpyFile subclass read methods now have an optional `use_memmap` argument to indicate whether the memmap interface should be used (vice direct file read) on a per-call basis. Default values are specific to the particular method and file interleave. Bug Fixes --------- * [Issue #7] Handle recognize comment lines in ENVI headers and accept blank parameter values in the header. Thanks to Don March (http://ohspite.net) * [Issue #2] Garbage results were being generated for several algorithms when a NaN value was present in the image data. Reasonable checks are now performed in several algorithms and an optional `allow_nan` argument (False by default) was added to `calc_stats`. * [Issue #1] For images with more rows than columns, the row/col of the pixel under the mouse cursor did not display if the row index was greater than the image width. 
Performance Improvements ------------------------ * [Issue #5] Improved BilFile.read_bands performance. Thanks to Don March (http://ohspite.net) * [Issue #8] Faster creation/display of RGB images for display. Thanks to Don March (http://ohspite.net) ================================================================================ SPy 0.14.0 ================================================================================ Release date: 2014.02.22 Changes ------- * Attempt to use Pillow fork of PIL, if available, rather than older PIL. * `view_cube` now uses common color scale limits on all side faces. * When creating an `AsterDatabase` instance, directories in the `SPECTRAL_DATA` environment variable are search for the specified file (after the current directory). * `spectral.imshow` accepts an optional `fignum` argument to render to an existing figure. * Class labels in a `spectral.imshow` window can be reassigned even when class labels were not provided in the function call (all pixels will start with class 0). * File `spectral/algorithms/perceptron.py` can be used independently of the rest of the package. Bug Fixes --------- * Front and left sides of the image cube displayed by `view_cube` were mirrored left-right. Cube aspect ratio was being computed incorrectly for non-square images. These bugs were introduced by a recent release. * Global covariance was not being scaled properly in the `MahalanobisDistanceClassifier`. Mathematically, it does not affect results and did not affect results on the test data but for large covariance with many classes, it could have cause rounding/truncation that would affect results. * PerceptronClassifier constructor was failing due to recent changes in base class code. Unit tests have been added to ensure it continues to work properly. Performance Improvements ------------------------ * PerceptronClassifier is roughly an order of magnitude faster due to better use of numpy. Inputs are now scaled and weights are initialized withing the data limits, which usually results in fewer iterations for convergence. ================================================================================ SPy 0.13 ================================================================================ Release date: 2014.01.05 New Features ------------ * New features for image displays: * Image rasters can be switched between *data*, *class*, and *class overlay* view, using adjustable class alpha transparency. * Image pixel regions can be interactively assigned to new classes using mouse/keyboard click & drag interaction. * A "zoom" window can be opened from an image view by pressing "z". The zoom window center point can be panned to a new location by holding CONTROL and left-clicking in the original window. * Image pixel interpolation can be switched between the module default and *nearest* (by pressing "i"). * The `view_nd` window can open a class map raster view (by pressing "c") that dynamically shows changed class values. * Added a `matched_filter` function, which wraps the `MatchedFilter` class and allows using local background stats using an inner/outer window. * `rx` accepts an optional *cov* argument, which specifies a global background covariance. When this argument is given, the background mean will be computed using a moving inner/outer window but the covariance will be fixed. * Function `cov_avg` computes covariance averaged over multiple classes (using the mean of each class. 
* Added `msam` (Modified SAM) function by Oshigami et al (code was contributed by Christian Mielke). * `matrix_sqrt` optionally returns the square root of the matrix inverse. * Created the `WindowedGaussianBackgroundMapper` to easily apply any pixel-based estimator to an image using local background stats computed for an inner/outer window. Performance Improvements ------------------------ * Significant speedup for sevaral algorithms using ndarray inputs: * 14x speedup for GMCL * 7x speedup for Mahalanobis distance classification * 3x speedup for `kmeans` * Windowed `rx` is significanty faster from using cached covariance. * `matrix_sqrt` accepts an optional *eigs* arguments that makes square root calculation significantly faster. * `GaussianStats` object caches eigen-decomposition, make calculation of other covariance-derived stats much faster. Changes ------- * Eigenvectors in `PrincipalComponents` objects (returned by `principal_components`) are now stored in columns (they were in rows) to be consistent with numpy. An explicit check is not performed to ensure eigenvalues are in descending order. * Converted `GaussianStats` constructor keyword args to optional positional args. * Added numerouse new unit tests. * Simplified the unit testing interface, eliminating the need for `test_method` decorator and `TestCase` subclass `run` methods. * Converted `SpySettings` object attributes to class attributes. * Removed `save_training_sets`. Use `TrainingClassSet.save` instead. Bug Fixes --------- * Exception occured when *bands* argument was not provided to `imshow`. * *stretch_all* parameter for `get_rgb` was sometimes being used when set to False or 0 (because it was not None). * Fixed broken implementation of `load_training_sets`. `save_training_sets` was also fixed (but implementation is replaced by TrainingClassSet.save`. * Fixed a scaling error in `linear_discriminant` and divide by N-1 to get unbiased estimate of *cov_b*. ================================================================================ SPy 0.12 ================================================================================ Release date: 2013.09.06 New Features ------------ * Added a wrapper around matplotlib's `imshow` to easily display HSI data. * A new memmap interface is provided for SpyFile objects (via the `open_memmap` method), including the ability to open writable memmaps. * Algorithm progress display can be enabled/disabled via the settings object. * RX anomaly detection can be performed using local statistics by specifying an inner/outer window. * A custom background color can be specified when calling `view_cube`. * Summary statistics are printed for unit test execution. Changes ------- * `get_image_display_data` has been renamed `get_rgb`. * `view_cube` will also accept an ndaray as the "top" keyword. * If present, image band info is saved when `envi.save_image` is called. * Allow calling :func:`~spectral.oi.envi.create_image` using keyword args instead of ENVI-specific header paramter names. * `save_rgb` automatically determines the output file type, based on the filename extension. * Results of several ImageArray methods will be cast to an ndarray. * The Image base class is now a new-style class. Bug Fixes --------- * Eliminated texture-wrapping display artifact near edges of displayed image cubes (called via `view_cube`). * RX.__call__ was failing when image statistics were not provided to class constructor. * Applied Ferdinand Deger's bugfix for `envi.create_image`. 
* `ImageView` objects raised an exception when the *data* constructor arg was a `SpyFile` object. * `wx.NewId` occasionally returns a weird number (large negative value), which raises an exception in wx. Now caching `NDWindow` right-click menu ID values to avoid this wx bug. ================================================================================ SPy 0.11 ================================================================================ Release date: 2013.04.03 New Features ------------ * RX anomaly detector. * Ability to save and create images in ENVI format. * Added `GaussianStats` class (returned by `calc_stats`). This class can be transformed by a `LinearTransform`. It has a `get_whitening_transform` method that returns a callable transform to whiten image data. * Added a unit-testing sub-package (`spectral.tests`) Changes ------- * Changed severals function to accept GaussianStats objects instead of sepaarate mean & covariance. * Changed names of several functions for consistency: - `open_image` replaces `image`. - `save_rgb` replaces `save_image` * Improved support for additional data types by reading byte strings into numpy arrays with dtypes instead of using builtin array module. Bug Fixes --------- * 32-bit integer image data was not being read properly. ================================================================================ SPy 0.10.1 ================================================================================ Release date: 2013.02.23 This is a bug-fix release that corrects the spectrum displayed when double- clicking on a raster display. Version 0.10 introduced a bug that had the row/column swapped, resulting in either the wrong pixel being plotted or an exception raised. ================================================================================ SPy 0.10 ================================================================================ Release date: 2013.02.17 As of this release, SPy now uses IPython for non-blocking GUI windows. IPython should be started in "--pylab" mode with the appropriate backend set (see :ref:`starting_ipython`). The standard python interpreter can still be used if GUI functions are not being called. New Features ------------ * `LinearTransform` and `transform_image` now handle scalar transforms. * All functions opening a GUI window will now return a proxy object to enable access to any associated data (e.g., accessing changed class values in an N-D data display). * GUI functions are now aware of differences in wxWidgets versions (2.8.x vs. 2.9.x). Changes ------- * SPy no longer requires explicit creation of a new wx thread. Instead, running SPy interactively with GUI functions now requires using IPython in "pylab" mode. * A few functions have been renamed for consistency: * `hypercube` is now `view_cube`. * `ndwindow is now `view_nd`. * numpy is used for more covariance calculations (e.g., class-specific covariance) to improve performance on multi-core systems. * Two new parameters have been added to the `spectral.settings` object: 1. `START_WX_APP` : If this parameter is True and no wx App exists when a GUI function is called, then an App will be started to prevent an error. 2. `WX_GL_DEPTH_SIZE` : If the default GL_DEPTH_SIZE is not supported by the host system (resulting in a blank GLCanvas in `view_cube` or `view_nd`), this parameter can be reduced (e.g., to 24 or 16) to enable OpenGL rendering. Bug Fixes --------- * Spectral plotting failed when double-clicking a transformed image due to band info being unavailable. 
A check is now performed to prevent this. * OpenGL-related calls will no longer fail if GLUT or an associated font is not available. ================================================================================ SPy 0.9 ================================================================================ Release date: 2013.01.23 - New Features - Added a linear target detector (MatchedFilter). - Added a class for linear transformations (LinearTransform). - Changes - `principal_components` function now returns a object, which contains return values previously in a tuple, , as well as the associated linear transform, and a `reduce` method. - `linear_discriminant` function now returns an object, which contains return values previously in a tuple, as well as the associated linear transform. - Covariance calculation is now performed using 64-bit floats. - Bug Fixes - Fixed a bug causing `ndwindow` to fail when no class mask is passed as a keyword arg. ================================================================================ SPy 0.8 Release Notes ================================================================================ Release date: 2012.07.15 - New Features - The :func:`~spectral.graphics.ndwindow.ndwindow` function enables viewing of high-dimensional images in a 3D display. See :ref:`nd_displays` for details. - Changes - Hypercube display now uses mouse control for pan/zoom/rotate. - Bug Fixes - Fixed a bug in several deprecation warnings that caused infinte recursion. - Fixed mismatch in parameter names in kmeans_ndarray. ================================================================================ SPy 0.7 Release Notes ================================================================================ Release date: 2012.02.19 - Changes - Changed many function/method names to be more consistent with external packages. Use of most old names will generate a deprecation warning but some will require immediate changes to client code. - :func:`spectral.kmeans` runs about 10 times faster now for loaded images. - Bug Fixes - The Erdas LAN file interface was modified because the previous reference file had mixed endian between header and data sections. If you are using the old sample file "92AV3C", then start using the "92AV3C.lan" file available on the web site (see Intro section of the user's guide). This file has consistent endian-ness between header and image data sections - Fixed a few bugs that potentially caused problems with the BIP and BSQ file interfaces. The specific methods fixed are: * BipFile.read_bands * BsqFile.read_subregion * BsqFile.read_subimage ================================================================================ SPy 0.6 Release Notes ================================================================================ Release date: 2011.01.17 - New Features: - Support for parsing & loading spectra from the ASTER Spectral Library. - Ability to save ENVI spectral library files. - :meth:`spectral.kmeans` will accept a :exc:`KeyboardInterrupt` exception (i.e., CTRL-C) and return the results as of the previous iteration. - Documention is now online via Sphinx. - Changes - Major changes to module/sub-module layout. Biggest change is that the top- level module is now "spectral" instead of "Spectra" (capitalization). Many functions/classes have moved between files and sub-modules but that should be transparent to most users since the most obvious names are automatically imported into the top-level module namespace. 
- Additional ENVI data file extensions are now searched (.bil, .bsq, .bip,) - Changed default colors in :obj:`spectral.spyColors`, which is the default color palette used by :meth:`spectral.viewIndexed`. - :meth:`spectral.transformImage` is now the means to apply a linear transform to an image (rather than creating a :class:`spectral.TransformedImage` directly) because it handles both :class:`spectral.SpyFile` and :class:`numpy.ndarray` objects. - 64-bit floats are now used for covariance matrices. - Changed SPECTRAL_DATA path delimiter from semi-colon to colon. - Bug fixes - Fixed a bug preventing successful reading of ENVI header files where an equal ("=") symbol appears in a parameter value. - Fixed bug where a ColorScale might return an incorrect color if the scale contained negative values. - :meth:`cluster` would fail if only 2 clusters were requested. - :meth:`kmeans` was reporting an incorrect number of pixels reassigned between iterations (did not affect final convergence). - :meth:`logDeterminant` was crashing when receiving negative values. - Missing a potential byte swap in :meth:`spectral.io.bilfileBilFile.readDatum`. spectral-0.22.4/setup.py000066400000000000000000000052561412674721200151440ustar00rootroot00000000000000#!/usr/bin/env python import ast import re try: from setuptools import setup except: from distutils.core import setup # taken from Flask _version_re = re.compile(r'__version__\s+=\s+(.*)') with open('spectral/__init__.py', 'rb') as f: version = str(ast.literal_eval(_version_re.search( f.read().decode('utf-8')).group(1))) long_description = '''Spectral Python (SPy) is a pure Python module for processing hyperspectral image data (imaging spectroscopy data). It has functions for reading, displaying, manipulating, and classifying hyperspectral imagery. 
SPy is Free, Open Source Software (FOSS) distributed under the MIT License.''' setup(name='spectral', version=version, description='Spectral Python (SPy) is a Python module for hyperspectral image processing.', long_description=long_description, author='Thomas Boggs', author_email='thomas.boggs@gmail.com', license='MIT', url='http://spectralpython.net', download_url='https://github.com/spectralpython/spectral/releases/latest', packages=['spectral', 'spectral.algorithms', 'spectral.database', 'spectral.graphics', 'spectral.io', 'spectral.tests', 'spectral.utilities'], package_data={'spectral.tests': ['data/*.spc', 'data/ecostress/*.txt', 'data/usgs/ASCIIdata/liba/*.txt', 'data/usgs/ASCIIdata/liba/ChapterB_b0/*.txt', 'data/usgs/ASCIIdata/liba/ChapterD_d0/*.txt', 'data/usgs/ASCIIdata/libc/*.txt', 'data/usgs/ASCIIdata/lib/errorbars/*.txt', 'data/usgs/ASCIIdata/libc/ChapterB_b0/*.txt', 'data/usgs/ASCIIdata/libc/ChapterD_d0/*.txt']}, platforms=['Platform-Independent'], install_requires=['numpy'], classifiers=['Development Status :: 4 - Beta', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Environment :: Console', 'Natural Language :: English', 'Intended Audience :: Science/Research', 'Topic :: Scientific/Engineering :: Image Recognition', 'Topic :: Scientific/Engineering :: GIS', 'Topic :: Scientific/Engineering :: Information Analysis', 'Topic :: Scientific/Engineering :: Visualization']) spectral-0.22.4/spectral/000077500000000000000000000000001412674721200152375ustar00rootroot00000000000000spectral-0.22.4/spectral/__init__.py000066400000000000000000000012771412674721200173570ustar00rootroot00000000000000''' Basic package setup and global imports. ''' from __future__ import absolute_import, division, print_function, unicode_literals __version__ = '0.22.4' import sys if sys.byteorder == 'little': byte_order = 0 # little endian else: byte_order = 1 # big endian BSQ = 0 BIL = 1 BIP = 2 from .utilities.errors import SpyException from .config import SpySettings, spy_colors settings = SpySettings() from .spectral import (open_image, load_training_sets, BandInfo) from .io import * from .algorithms import * from .graphics import * from .database import * # Import some submodules into top-level namespace from .algorithms import detectors from .spectral import _init _init() del _init spectral-0.22.4/spectral/algorithms/000077500000000000000000000000001412674721200174105ustar00rootroot00000000000000spectral-0.22.4/spectral/algorithms/__init__.py000066400000000000000000000013071412674721200215220ustar00rootroot00000000000000from __future__ import absolute_import, division, print_function, unicode_literals from .algorithms import (mean_cov, covariance, principal_components, bdist, linear_discriminant, create_training_classes, ndvi, orthogonalize, transform_image, unmix, spectral_angles, calc_stats, cov_avg, msam, noise_from_diffs, mnf, GaussianStats, ppi, smacc) from .classifiers import * from .clustering import L1, L2, kmeans from .resampling import BandResampler from .transforms import LinearTransform from .detectors import * from .spatial import * from .continuum import spectral_continuum, remove_continuum, continuum_points spectral-0.22.4/spectral/algorithms/algorithms.py000066400000000000000000001752471412674721200221530ustar00rootroot00000000000000''' Basic algorithms and data handling code. 
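
A minimal usage sketch (illustrative only; `img` is assumed to be an `MxNxB`
:class:`numpy.ndarray` of image data already in memory)::

    import spectral as spy

    stats = spy.calc_stats(img)            # mean/covariance over all pixels
    pc = spy.principal_components(img)     # eigen-decomposition of covariance
    pc_0999 = pc.reduce(fraction=0.999)    # retain 99.9% of image variance
    img_pc = pc_0999.transform(img)        # project pixels onto retained PCs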
''' from __future__ import absolute_import, division, print_function, unicode_literals import math from numbers import Integral import numpy as np import pickle import spectral as spy from ..io.spyfile import SpyFile, TransformedImage from ..utilities.errors import has_nan, NaNValueError from .spymath import matrix_sqrt from .transforms import LinearTransform class Iterator: ''' Base class for iterators over pixels (spectra). ''' def __init__(self): pass def __iter__(self): raise NotImplementedError('Must override __iter__ in child class.') def get_num_elements(self): raise NotImplementedError( 'Must override get_num_elements in child class.') def get_num_bands(self): raise NotImplementedError( 'Must override get_num_bands in child class.') class ImageIterator(Iterator): ''' An iterator over all pixels in an image. ''' def __init__(self, im): self.image = im self.numElements = im.shape[0] * im.shape[1] def get_num_elements(self): return self.numElements def get_num_bands(self): return self.image.shape[2] def __iter__(self): (M, N) = self.image.shape[:2] count = 0 for i in range(M): self.row = i for j in range(N): self.col = j yield self.image[i, j] class ImageMaskIterator(Iterator): ''' An iterator over all pixels in an image corresponding to a specified mask. ''' def __init__(self, image, mask, index=None): if mask.shape != image.shape[:len(mask.shape)]: raise ValueError('Mask shape does not match image.') self.image = image self.index = index # Get the proper mask for the training set if index: self.mask = np.equal(mask, index) else: self.mask = np.not_equal(mask, 0) self.n_elements = sum(self.mask.ravel()) def get_num_elements(self): return self.n_elements def get_num_bands(self): return self.image.shape[2] def __iter__(self): coords = np.argwhere(self.mask) for (i, j) in coords: (self.row, self.col) = (i, j) yield self.image[i, j].astype(self.image.dtype).squeeze() def iterator(image, mask=None, index=None): ''' Returns an iterator over pixels in the image. Arguments: `image` (ndarray or :class:`spectral.Image`): An image over whose pixels will be iterated. `mask` (ndarray) [default None]: An array of integers that specify over which pixels in `image` iteration should be performed. `index` (int) [default None]: Specifies which value in `mask` should be used for iteration. Returns (:class:`spectral.Iterator`): An iterator over image pixels. If neither `mask` nor `index` are defined, iteration is performed over all pixels. If `mask` (but not `index`) is defined, iteration is performed over all pixels for which `mask` is nonzero. If both `mask` and `index` are defined, iteration is performed over all pixels `image[i,j]` for which `mask[i,j] == index`. ''' if isinstance(image, Iterator): return image elif mask is not None: return ImageMaskIterator(image, mask, index) else: return ImageIterator(image) def iterator_ij(mask, index=None): ''' Returns an iterator over image pixel coordinates for a given mask. Arguments: `mask` (ndarray) [default None]: An array of integers that specify which coordinates should be returned. `index` (int) [default None]: Specifies which value in `mask` should be used for iteration. Returns: An iterator over image pixel coordinates. Each returned item is a 2-tuple of the form (row, col). If `index` is not defined, iteration is performed over all non-zero elements. If `index` is defined, iteration is performed over all coordinates for whch `mask[i,j] == index`. 
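
    Example (an illustrative sketch; the small class map below is hypothetical)::

        import numpy as np

        classes = np.zeros((4, 4), dtype=int)
        classes[1, 2] = 3
        classes[2, 3] = 3

        # Coordinates of all pixels labeled with class 3:
        # yields the tuples (1, 2) and (2, 3)
        coords = list(iterator_ij(classes, 3))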
''' if mask.ndim != 2: raise ValueError('Invalid mask shape.') if index is None: mask = mask != 0 else: mask = mask == index for rc in np.argwhere(mask): yield tuple(rc) def mean_cov(image, mask=None, index=None): ''' Return the mean and covariance of the set of vectors. Usage:: (mean, cov, S) = mean_cov(vectors [, mask=None [, index=None]]) Arguments: `image` (ndarrray, :class:`~spectral.Image`, or :class:`spectral.Iterator`): If an ndarray, it should have shape `MxNxB` and the mean & covariance will be calculated for each band (third dimension). `mask` (ndarray): If `mask` is specified, mean & covariance will be calculated for all pixels indicated in the mask array. If `index` is specified, all pixels in `image` for which `mask == index` will be used; otherwise, all nonzero elements of `mask` will be used. `index` (int): Specifies which value in `mask` to use to select pixels from `image`. If not specified but `mask` is, then all nonzero elements of `mask` will be used. If neither `mask` nor `index` are specified, all samples in `vectors` will be used. Returns a 3-tuple containing: `mean` (ndarray): The length-`B` mean vectors `cov` (ndarray): The `BxB` unbiased estimate (dividing by N-1) of the covariance of the vectors. `S` (int): Number of samples used to calculate mean & cov Calculate the mean and covariance of of the given vectors. The argument can be an Iterator, a SpyFile object, or an `MxNxB` array. ''' status = spy._status if isinstance(image, np.ndarray): X = image.astype(np.float64) if X.ndim == 3: X = image.reshape(-1, image.shape[-1]).T if mask is not None: mask = mask.ravel() if index is not None: ii = np.argwhere(mask == index) else: ii = np.argwhere(mask != 0) X = np.take(X, ii.squeeze(), axis=1) m = np.average(X, axis=1) C = np.cov(X) return (m, C, X.shape[1]) if not isinstance(image, Iterator): it = iterator(image, mask, index) else: it = image nSamples = it.get_num_elements() B = it.get_num_bands() sumX = np.zeros((B,), 'd') sumX2 = np.zeros((B, B), 'd') count = 0 statusInterval = max(1, nSamples / 100) status.display_percentage('Covariance.....') for x in it: if not count % statusInterval: status.update_percentage(float(count) / nSamples * 100.) count += 1 sumX += x x = x.astype(np.float64)[:, np.newaxis] sumX2 += x.dot(x.T) mean = (sumX / count) sumX = sumX[:, np.newaxis] cov = (sumX2 - sumX.dot(sumX.T) / count) / (count - 1) status.end_percentage() return (mean, cov, count) def cov_avg(image, mask, weighted=True): '''Calculates the covariance averaged over a set of classes. Arguments: `image` (ndarrray, :class:`~spectral.Image`, or :class:`spectral.Iterator`): If an ndarray, it should have shape `MxNxB` and the mean & covariance will be calculated for each band (third dimension). `mask` (integer-valued ndarray): Elements specify the classes associated with pixels in `image`. All pixels associeted with non-zero elements of `mask` will be used in the covariance calculation. `weighted` (bool, default True): Specifies whether the individual class covariances should be weighted when computing the average. If True, each class will be weighted by the number of pixels provided for the class; otherwise, a simple average of the class covariances is performed. Returns a class-averaged covariance matrix. The number of covariances used in the average is equal to the number of non-zero elements of `mask`. 
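
    Example (an illustrative sketch using random data in place of a real image)::

        import numpy as np
        import spectral as spy

        img = np.random.random((10, 10, 5))        # hypothetical 10x10 image, 5 bands
        class_mask = np.zeros((10, 10), dtype=int)
        class_mask[:5, :] = 1                      # class 1 pixels
        class_mask[5:, :] = 2                      # class 2 pixels

        C = spy.cov_avg(img, class_mask)           # average weighted by class sizes
        assert C.shape == (5, 5)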
''' ids = set(mask.ravel()) - set((0,)) classes = [calc_stats(image, mask, i) for i in ids] N = sum([c.nsamples for c in classes]) if weighted: return np.sum([((c.nsamples - 1) / float(N - 1)) * c.cov for c in classes], axis=0, dtype=np.float64) else: return np.mean([c.cov for c in classes], axis=0, dtype=np.float64) def covariance(*args): ''' Returns the covariance of the set of vectors. Usage:: C = covariance(vectors [, mask=None [, index=None]]) Arguments: `vectors` (ndarrray, :class:`~spectral.Image`, or :class:`spectral.Iterator`): If an ndarray, it should have shape `MxNxB` and the mean & covariance will be calculated for each band (third dimension). `mask` (ndarray): If `mask` is specified, mean & covariance will be calculated for all pixels indicated in the mask array. If `index` is specified, all pixels in `image` for which `mask == index` will be used; otherwise, all nonzero elements of `mask` will be used. `index` (int): Specifies which value in `mask` to use to select pixels from `image`. If not specified but `mask` is, then all nonzero elements of `mask` will be used. If neither `mask` nor `index` are specified, all samples in `vectors` will be used. Returns: `C` (ndarray): The `BxB` unbiased estimate (dividing by N-1) of the covariance of the vectors. To also return the mean vector and number of samples, call :func:`~spectral.algorithms.algorithms.mean_cov` instead. ''' return mean_cov(*args)[1] class PrincipalComponents: ''' An object for storing a data set's principal components. The object has the following members: `eigenvalues`: A length B array of eigenvalues sorted in descending order `eigenvectors`: A `BxB` array of normalized eigenvectors (in columns) `stats` (:class:`GaussianStats`): A statistics object containing `mean`, `cov`, and `nsamples`. `transform`: A callable function to transform data to the space of the principal components. `reduce`: A method to return a reduced set of principal components based on either a fixed number of components or a fraction of total variance. `denoise`: A callable function to denoise data using a reduced set of principal components. `get_denoising_transform`: A callable function that returns a function for denoising data. ''' def __init__(self, vals, vecs, stats): self.eigenvalues = vals self.eigenvectors = vecs self.stats = stats self.transform = LinearTransform(self.eigenvectors.T, pre=-self.mean) @property def mean(self): return self.stats.mean @property def cov(self): return self.stats.cov def reduce(self, N=0, **kwargs): '''Reduces the number of principal components. Keyword Arguments (one of the following must be specified): `num` (integer): Number of eigenvalues/eigenvectors to retain. The top `num` eigenvalues will be retained. `eigs` (list): A list of indices of eigenvalues/eigenvectors to be retained. `fraction` (float): The fraction of total image variance to retain. Eigenvalues will be retained (starting from greatest to smallest) until `fraction` of total image variance is retained. 
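
        Example (an illustrative sketch; random data stands in for a real image)::

            import numpy as np
            import spectral as spy

            img = np.random.random((20, 20, 10))
            pc = spy.principal_components(img)

            pc5 = pc.reduce(num=5)             # keep the 5 largest eigenvalues
            pc99 = pc.reduce(fraction=0.99)    # keep 99% of total variance
            img5 = pc5.transform(img)          # pixels projected onto 5 components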
''' status = spy._status num = kwargs.get('num', None) eigs = kwargs.get('eigs', None) fraction = kwargs.get('fraction', None) if num is not None: return PrincipalComponents(self.eigenvalues[:num], self.eigenvectors[:, :num], self.stats) elif eigs is not None: vals = self.eigenvalues[eigs] vecs = self.eigenvectors[:, eigs] return PrincipalComponents(vals, vecs, self.stats) elif fraction is not None: if not 0 < fraction <= 1: raise Exception('fraction must be in range (0,1].') N = len(self.eigenvalues) cumsum = np.cumsum(self.eigenvalues) sum = cumsum[-1] # Count how many values to retain. for i in range(N): if (cumsum[i] / sum) >= fraction: break if i == (N - 1): # No reduction status.write('No reduction in eigenvectors achieved.') return self vals = self.eigenvalues[:i + 1] vecs = self.eigenvectors[:, :i + 1] return PrincipalComponents(vals, vecs, self.stats) else: raise Exception('Must specify one of the following keywords:' '`num`, `eigs`, `fraction`.') def denoise(self, X, **kwargs): '''Returns a de-noised version of `X`. Arguments: `X` (np.ndarray): Data to be de-noised. Can be a single pixel or an image. Keyword Arguments (one of the following must be specified): `num` (integer): Number of eigenvalues/eigenvectors to use. The top `num` eigenvalues will be used. `eigs` (list): A list of indices of eigenvalues/eigenvectors to be used. `fraction` (float): The fraction of total image variance to retain. Eigenvalues will be included (starting from greatest to smallest) until `fraction` of total image variance is retained. Returns denoised image data with same shape as `X`. Note that calling this method is equivalent to calling the `get_denoising_transform` method with same keyword and applying the returned transform to `X`. If you only intend to denoise data with the same parameters multiple times, then it is more efficient to get the denoising transform and reuse it, rather than calling this method multilple times. ''' f = self.get_denoising_transform(**kwargs) return f(X) def get_denoising_transform(self, **kwargs): '''Returns a function for denoising image data. Keyword Arguments (one of the following must be specified): `num` (integer): Number of eigenvalues/eigenvectors to use. The top `num` eigenvalues will be used. `eigs` (list): A list of indices of eigenvalues/eigenvectors to be used. `fraction` (float): The fraction of total image variance to retain. Eigenvalues will be included (starting from greatest to smallest) until `fraction` of total image variance is retained. Returns a callable :class:`~spectral.algorithms.transforms.LinearTransform` object for denoising image data. ''' V = self.reduce(self, **kwargs).eigenvectors f = LinearTransform(V.dot(V.T), pre=-self.mean, post=self.mean) return f def principal_components(image): ''' Calculate Principal Component eigenvalues & eigenvectors for an image. Usage:: pc = principal_components(image) Arguments: `image` (ndarray, :class:`spectral.Image`, :class:`GaussianStats`): An `MxNxB` image Returns a :class:`~spectral.algorithms.algorithms.PrincipalComponents` object with the following members: `eigenvalues`: A length B array of eigenvalues `eigenvectors`: A `BxB` array of normalized eigenvectors `stats` (:class:`GaussianStats`): A statistics object containing `mean`, `cov`, and `nsamples`. `transform`: A callable function to transform data to the space of the principal components. `reduce`: A method to reduce the number of eigenvalues. `denoise`: A callable function to denoise data using a reduced set of principal components. 
`get_denoising_transform`: A callable function that returns a function for denoising data. ''' if isinstance(image, GaussianStats): stats = image else: stats = calc_stats(image) (L, V) = np.linalg.eig(stats.cov) # numpy says eigenvalues may not be sorted so we'll sort them, if needed. if not np.alltrue(np.diff(L) <= 0): ii = list(reversed(np.argsort(L))) L = L[ii] V = V[:, ii] return PrincipalComponents(L, V, stats) class FisherLinearDiscriminant: ''' An object for storing a data set's linear discriminant data. For `C` classes with `B`-dimensional data, the object has the following members: `eigenvalues`: A length `C-1` array of eigenvalues `eigenvectors`: A `BxC` array of normalized eigenvectors `mean`: The length `B` mean vector of the image pixels (from all classes) `cov_b`: The `BxB` matrix of covariance *between* classes `cov_w`: The `BxB` matrix of average covariance *within* each class `transform`: A callable function to transform data to the space of the linear discriminant. ''' def __init__(self, vals, vecs, mean, cov_b, cov_w): self.eigenvalues = vals self.eigenvectors = vecs self.mean = mean self.cov_b = cov_b self.cov_w = cov_w self.transform = LinearTransform(self.eigenvectors.T, pre=-self.mean) def linear_discriminant(classes, whiten=True): ''' Solve Fisher's linear discriminant for eigenvalues and eigenvectors. Usage: (L, V, Cb, Cw) = linear_discriminant(classes) Arguments: `classes` (:class:`~spectral.algorithms.TrainingClassSet`): The set of `C` classes to discriminate. Returns a `FisherLinearDiscriminant` object containing the within/between- class covariances, mean vector, and a callable transform to convert data to the transform's space. This function determines the solution to the generalized eigenvalue problem Cb * x = lambda * Cw * x Since cov_w is normally invertable, the reduces to (inv(Cw) * Cb) * x = lambda * x References: Richards, J.A. & Jia, X. Remote Sensing Digital Image Analysis: An Introduction. (Springer: Berlin, 1999). ''' C = len(classes) # Number of training sets rank = len(classes) - 1 classes.calc_stats() # Calculate total # of training pixels and total mean N = 0 B = classes.nbands K = len(classes) mean = np.zeros(B, dtype=np.float64) for s in classes: N += s.size() mean += s.size() * s.stats.mean mean /= N cov_b = np.zeros((B, B), np.float64) # cov between classes cov_w = np.zeros((B, B), np.float64) # cov within classes for s in classes: cov_w += ((s.size() - 1) / float(N - 1)) * s.stats.cov m = s.stats.mean - mean cov_b += (s.size() / float(N) / (K - 1)) * np.outer(m, m) inv_cov_w = np.linalg.inv(cov_w) (vals, vecs) = np.linalg.eig(inv_cov_w.dot(cov_b)) vals = vals[:rank] vecs = vecs[:, :rank] if whiten: # Diagonalize cov_within in the new space v = vecs.T.dot(cov_w).dot(vecs) d = np.sqrt(np.diag(v) * np.diag(v).conj()) for i in range(vecs.shape[1]): vecs[:, i] /= math.sqrt(d[i].real) return FisherLinearDiscriminant(vals.real, vecs.real, mean, cov_b, cov_w) # Alias for Linear Discriminant Analysis (LDA) lda = linear_discriminant def log_det(x): return sum(np.log([eigv for eigv in np.linalg.eigvals(x) if eigv > 0])) class GaussianStats(object): '''A class for storing Gaussian statistics for a data set. Statistics stored include: `mean`: Mean vector `cov`: Covariance matrix `nsamples`: Number of samples used in computing the statistics Several derived statistics are computed on-demand (and cached) and are available as property attributes. 
These include: `inv_cov`: Inverse of the covariance `sqrt_cov`: Matrix square root of covariance: sqrt_cov.dot(sqrt_cov) == cov `sqrt_inv_cov`: Matrix square root of the inverse of covariance `log_det_cov`: The log of the determinant of the covariance matrix `principal_components`: The principal components of the data, based on mean and cov. ''' def __init__(self, mean=None, cov=None, nsamples=None, inv_cov=None): self.cov = cov self._inv_cov = inv_cov self.mean = mean self.nsamples = nsamples @property def cov(self): '''Property method returning the covariance matrix.''' return self._cov @cov.setter def cov(self, C): self.reset_derived_stats() self._cov = C @property def inv_cov(self): '''Property method returning the inverse of the covariance matrix.''' if self._inv_cov is None: self._inv_cov = np.linalg.inv(self._cov) return self._inv_cov def reset_derived_stats(self): self._cov = self._inv_cov = None self._sqrt_cov = self._sqrt_inv_cov = self._pcs = None self._log_det_cov = None @property def sqrt_cov(self): '''Property method returning the matrix square root of the covariance. If `C` is the covariance, then the returned value is a matrix `S` such that S.dot(S) == C. ''' if self._sqrt_cov is None: pcs = self.principal_components self._sqrt_cov = matrix_sqrt(eigs=(pcs.eigenvalues, pcs.eigenvectors), symmetric=True) return self._sqrt_cov @property def sqrt_inv_cov(self): '''Property method returning matrix square root of inverse of cov. If `C` is the covariance, then the returned value is a matrix `S` such that S.dot(S) == inv(C). ''' if self._sqrt_inv_cov is None: pcs = self.principal_components self._sqrt_inv_cov = matrix_sqrt(eigs=(pcs.eigenvalues, pcs.eigenvectors), symmetric=True, inverse=True) return self._sqrt_inv_cov @property def principal_components(self): if self._pcs is None: (evals, evecs) = np.linalg.eigh(self._cov) self._pcs = PrincipalComponents(evals, evecs, self) return self._pcs @property def log_det_cov(self): if self._log_det_cov is None: evals = self.principal_components.eigenvalues self._log_det_cov = np.sum(np.log([v for v in evals if v > 0])) return self._log_det_cov def transform(self, xform): '''Returns a version of the stats transformed by a linear transform.''' if not isinstance(xform, LinearTransform): raise TypeError('Expected a LinearTransform object.') m = xform(self.mean) C = xform._A.dot(self.cov).dot(xform._A.T) return GaussianStats(mean=m, cov=C, nsamples=self.nsamples) def get_whitening_transform(self): '''Returns transform that centers and whitens data for these stats.''' C_1 = np.linalg.inv(self.cov) return LinearTransform(matrix_sqrt(C_1, True), pre=-self.mean) def calc_stats(image, mask=None, index=None, allow_nan=False): '''Computes Gaussian stats for image data.. Arguments: `image` (ndarrray, :class:`~spectral.Image`, or :class:`spectral.Iterator`): If an ndarray, it should have shape `MxNxB` and the mean & covariance will be calculated for each band (third dimension). `mask` (ndarray): If `mask` is specified, mean & covariance will be calculated for all pixels indicated in the mask array. If `index` is specified, all pixels in `image` for which `mask == index` will be used; otherwise, all nonzero elements of `mask` will be used. `index` (int): Specifies which value in `mask` to use to select pixels from `image`. If not specified but `mask` is, then all nonzero elements of `mask` will be used. 
`allow_nan` (bool, default False): If True, statistics will be computed even if `np.nan` values are present in the data; otherwise, `~spectral.algorithms.spymath.NaNValueError` is raised. If neither `mask` nor `index` are specified, all samples in `vectors` will be used. Returns: `GaussianStats` object: This object will have members `mean`, `cov`, and `nsamples`. ''' (mean, cov, N) = mean_cov(image, mask, index) if has_nan(mean) and not allow_nan: raise NaNValueError('NaN values present in data.') return GaussianStats(mean=mean, cov=cov, nsamples=N) class TrainingClass: def __init__(self, image, mask, index=0, class_prob=1.0): '''Creates a new training class defined by applying `mask` to `image`. Arguments: `image` (:class:`spectral.Image` or :class:`numpy.ndarray`): The `MxNxB` image over which the training class is defined. `mask` (:class:`numpy.ndarray`): An `MxN` array of integers that specifies which pixels in `image` are associated with the class. `index` (int) [default 0]: if `index` == 0, all nonzero elements of `mask` are associated with the class. If `index` is nonzero, all elements of `mask` equal to `index` are associated with the class. `class_prob` (float) [default 1.0]: Defines the prior probability associated with the class, which is used in maximum likelihood classification. If `classProb` is 1.0, prior probabilities are ignored by classifiers, giving all class equal weighting. ''' self.image = image if image is not None: self.nbands = image.shape[2] self.nbands = None self.mask = mask self.index = index self.class_prob = class_prob self.stats = None self._stats_valid = False def __iter__(self): '''Returns an iterator over all samples for the class.''' it = ImageMaskIterator(self.image, self.mask, self.index) for i in it: yield i def stats_valid(self, tf=None): ''' Sets statistics for the TrainingClass to be valid or invalid. Arguments: `tf` (bool or None): A value evaluating to False indicates that statistics should be recalculated prior to being used. If the argument is `None`, a value will be returned indicating whether stats need to be recomputed. ''' if tf is None: return self._stats_valid self._stats_valid = tf def size(self): '''Returns the number of pixels/samples in the training set.''' # If the stats are invalid, the number of pixels in the # training set may have changed. if self._stats_valid: return self.stats.nsamples if self.index: return np.sum(np.equal(self.mask, self.index).ravel()) else: return np.sum(np.not_equal(self.mask, 0).ravel()) def calc_stats(self): ''' Calculates statistics for the class. This function causes the :attr:`stats` attribute of the class to be updated, where `stats` will have the following attributes: ============= ====================== =================================== Attribute Type Description ============= ====================== =================================== `mean` :class:`numpy.ndarray` length-`B` mean vector `cov` :class:`numpy.ndarray` `BxB` covariance matrix `inv_cov` :class:`numpy.ndarray` Inverse of `cov` `log_det_cov` float Natural log of determinant of `cov` ============= ====================== =================================== ''' self.stats = calc_stats(self.image, self.mask, self.index) self.nbands = self.image.shape[-1] self._stats_valid = True def transform(self, transform): ''' Perform a linear transformation on the statistics of the training set. Arguments: `transform` (:class:numpy.ndarray or LinearTransform): The linear transform array. 
If the class has `B` bands, then `transform` must have shape `(C,B)`. After `transform` is applied, the class statistics will have `C` bands. ''' if isinstance(transform, np.ndarray): transform = LinearTransform(transform) self.stats.mean = transform(self.stats.mean) self.stats.cov = np.dot( transform._A, self.stats.cov).dot(transform._A.T) self.nbands = transform.dim_out class SampleIterator: '''Iterator over all classes and samples in a TrainingClassSet object.''' def __init__(self, trainingData): self.classes = trainingData def __iter__(self): for cl in self.classes: for sample in cl: yield sample class TrainingClassSet: '''A class to manage a set of :class:`~spectral.TrainingClass` objects.''' def __init__(self): self.classes = {} self.nbands = None def __getitem__(self, i): '''Returns the training class having ID i.''' return self.classes[i] def __len__(self): '''Returns number of training classes in the set.''' return len(self.classes) def add_class(self, cl): '''Adds a new class to the training set. Arguments: `cl` (:class:`spectral.TrainingClass`): `cl.index` must not duplicate a class already in the set. ''' if cl.index in self.classes: raise Exception('Attempting to add class with duplicate index.') self.classes[cl.index] = cl if not self.nbands: self.nbands = cl.nbands def transform(self, X): '''Applies linear transform, M, to all training classes. Arguments: `X` (:class:numpy.ndarray): The linear transform array. If the classes have `B` bands, then `X` must have shape `(C,B)`. After the transform is applied, all classes will have `C` bands. ''' for cl in list(self.classes.values()): cl.transform(X) self.nbands = list(self.classes.values())[0].nbands def __iter__(self): '''An iterator over all training classes in the set.''' for cl in list(self.classes.values()): yield cl def all_samples(self): '''An iterator over all samples in all classes.''' return SampleIterator(self) def calc_stats(self): '''Computes statistics for each class, if not already computed.''' for c in list(self.classes.values()): if not c.stats_valid(): c.calc_stats() self.nbands = list(self.classes.values())[0].nbands def save(self, filename, calc_stats=False): for c in list(self.classes.values()): if c.stats is None: if calc_stats == False: msg = 'Class statistics are missing from at least one ' \ 'class and are required to save the training class ' \ 'data. Call the `save` method with keyword ' \ '`calc_stats=True` if you want to compute them and ' \ 'then save the class data.' raise Exception (msg) else: c.calc_stats() f = open(filename, 'wb') ids = sorted(self.classes.keys()) pickle.dump(self.classes[ids[0]].mask, f) pickle.dump(len(self), f) for id in ids: c = self.classes[id] pickle.dump(c.index, f) pickle.dump(c.stats.cov, f) pickle.dump(c.stats.mean, f) pickle.dump(c.stats.nsamples, f) pickle.dump(c.class_prob, f) f.close() def load(self, filename, image): f = open(filename, 'rb') mask = pickle.load(f) nclasses = pickle.load(f) for i in range(nclasses): index = pickle.load(f) cov = pickle.load(f) mean = pickle.load(f) nsamples = pickle.load(f) class_prob = pickle.load(f) c = TrainingClass(image, mask, index, class_prob) c.stats = GaussianStats(mean=mean, cov=cov, nsamples=nsamples) if not (cov is None or mean is None or nsamples is None): c.stats_valid(True) c.nbands = len(mean) self.add_class(c) f.close def create_training_classes(image, class_mask, calc_stats=False, indices=None): ''' Creates a :class:spectral.algorithms.TrainingClassSet: from an indexed array. 
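    Example (a sketch, assuming `data` is an `MxNxB` image array and `gt` is an
    `MxN` array of ground-truth class indices):

    >>> classes = create_training_classes(data, gt, calc_stats=True)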
USAGE: sets = createTrainingClasses(classMask) Arguments: `image` (:class:`spectral.Image` or :class:`numpy.ndarray`): The image data for which the training classes will be defined. `image` has shape `MxNxB`. `class_mask` (:class:`numpy.ndarray`): A rank-2 array whose elements are indices of various spectral classes. if `class_mask[i,j]` == `k`, then `image[i,j]` is assumed to belong to class `k`. `calc_stats` (bool): An optional parameter which, if True, causes statistics to be calculated for all training classes. Returns: A :class:`spectral.algorithms.TrainingClassSet` object. The dimensions of classMask should be the same as the first two dimensions of the corresponding image. Values of zero in classMask are considered unlabeled and are not added to a training set. ''' if indices is not None: class_indices = set(indices) - set((0,)) else: class_indices = set(class_mask.ravel()) - set((0,)) classes = TrainingClassSet() classes.nbands = image.shape[-1] for i in class_indices: cl = TrainingClass(image, class_mask, i) if calc_stats: cl.calc_stats() classes.add_class(cl) return classes def ndvi(data, red, nir): '''Calculates Normalized Difference Vegetation Index (NDVI). Arguments: `data` (ndarray or :class:`spectral.Image`): The array or SpyFile for which to calculate the index. `red` (int or int range): Index of the red band or an index range for multiple bands. `nir` (int or int range): An integer index of the near infrared band or an index range for multiple bands. Returns an ndarray: An array containing NDVI values in the range [-1.0, 1.0] for each corresponding element of data. ''' r = data[:, :, red].astype(float) if len(r.shape) == 3 and r.shape[2] > 1: r = sum(r, 2) / r.shape[2] n = data[:, :, nir].astype(float) if len(n.shape) == 3 and n.shape[2] > 1: n = sum(n, 2) / n.shape[2] return (n - r) / (n + r) def bdist(class1, class2): ''' Calulates the Bhattacharyya distance between two classes. USAGE: bd = bdist(class1, class2) Arguments: `class1`, `class2` (:class:`~spectral.algorithms.algorithms.TrainingClass`) Returns: A float value for the Bhattacharyya Distance between the classes. This function is aliased to :func:`~spectral.algorithms.algorithms.bDistance`. References: Richards, J.A. & Jia, X. Remote Sensing Digital Image Analysis: An Introduction. (Springer: Berlin, 1999). ''' terms = bdist_terms(class1, class2) return terms[0] + terms[1] bDistance = bdist def bdist_terms(a, b): ''' Calulate the linear and quadratic terms of the Bhattacharyya distance between two classes. USAGE: (linTerm, quadTerm) = bDistanceTerms(a, b) ARGUMENTS: (a, b) The classes for which to determine the B-distance. RETURN VALUE: A 2-tuple of the linear and quadratic terms ''' m = a.stats.mean - b.stats.mean avgCov = (a.stats.cov + b.stats.cov) / 2 lin_term = (1 / 8.) * np.dot(np.transpose(m), np.dot(np.inv(avgCov), m)) quad_term = 0.5 * (log_det(avgCov) - 0.5 * a.stats.log_det_cov - 0.5 * b.stats.log_det_cov) return (lin_term, float(quad_term)) def transform_image(matrix, image): ''' Performs linear transformation on all pixels in an image. Arguments: matrix (:class:`numpy.ndarray`): A `CxB` linear transform to apply. image (:class:`numpy.ndarray` or :class:`spectral.Image`): Image data to transform Returns: If `image` is an `MxNxB` :class:`numpy.ndarray`, the return will be a transformed :class:`numpy.ndarray` with shape `MxNxC`. 
If `image` is :class:`spectral.Image`, the returned object will be a :class:`spectral.TransformedImage` object and no transformation of data will occur until elements of the object are accessed. ''' if isinstance(image, SpyFile): return TransformedImage(matrix, image) elif isinstance(image, np.ndarray): (M, N, B) = image.shape ximage = np.zeros((M, N, matrix.shape[0]), float) for i in range(M): for j in range(N): ximage[i, j] = np.dot(matrix, image[i, j].astype(float)) return ximage else: raise 'Unrecognized image type passed to transform_image.' def orthogonalize(vecs, start=0): ''' Performs Gram-Schmidt Orthogonalization on a set of vectors. Arguments: `vecs` (:class:`numpy.ndarray`): The set of vectors for which an orthonormal basis will be created. If there are `C` vectors of length `B`, `vecs` should be `CxB`. `start` (int) [default 0]: If `start` > 0, then `vecs[start]` will be assumed to already be orthonormal. Returns: A new `CxB` containing an orthonormal basis for the given vectors. ''' (M, N) = vecs.shape basis = np.array(np.transpose(vecs)) eye = np.identity(N).astype(float) for i in range(start, M): if i == 0: basis[:, 0] /= np.linalg.norm(basis[:, 0]) continue v = basis[:, i] / np.linalg.norm(basis[:, i]) U = basis[:, :i] P = eye - U.dot(np.linalg.inv(U.T.dot(U)).dot(U.T)) basis[:, i] = P.dot(v) basis[:, i] /= np.linalg.norm(basis[:, i]) return np.transpose(basis) def unmix(data, members): ''' Perform linear unmixing on image data. USAGE: mix = unmix(data, members) ARGUMENTS: data The MxNxB image data to be unmixed members An CxB array of C endmembers RETURN VALUE: mix An MxNxC array of endmember fractions. unmix performs linear unmixing on the image data. After calling the function, mix[:,:,i] will then represent the fractional abundances for the i'th endmember. If the result of unmix is returned into 'mix', then an array of indices of greatest fractional endmembers is obtained by argmax(mix). Note that depending on endmembers given, fractional abundances for endmembers may be negative. ''' assert members.shape[1] == data.shape[2], \ 'Matrix dimensions are not aligned.' members = members.astype(float) # Calculate the pseudo inverse pi = np.dot(members, np.transpose(members)) pi = np.dot(np.inv(pi), members) (M, N, B) = data.shape unmixed = np.zeros((M, N, members.shape[0]), float) for i in range(M): for j in range(N): unmixed[i, j] = np.dot(pi, data[i, j].astype(float)) return unmixed def spectral_angles(data, members): '''Calculates spectral angles with respect to given set of spectra. Arguments: `data` (:class:`numpy.ndarray` or :class:`spectral.Image`): An `MxNxB` image for which spectral angles will be calculated. `members` (:class:`numpy.ndarray`): `CxB` array of spectral endmembers. Returns: `MxNxC` array of spectral angles. Calculates the spectral angles between each vector in data and each of the endmembers. The output of this function (angles) can be used to classify the data by minimum spectral angle by calling argmin(angles). ''' assert members.shape[1] == data.shape[2], \ 'Matrix dimensions are not aligned.' m = np.array(members, np.float64) m /= np.sqrt(np.einsum('ij,ij->i', m, m))[:, np.newaxis] norms = np.sqrt(np.einsum('ijk,ijk->ij', data, data)) dots = np.einsum('ijk,mk->ijm', data, m) dots = np.clip(dots / norms[:, :, np.newaxis], -1, 1) return np.arccos(dots) def msam(data, members): '''Modified SAM scores according to Oshigami, et al [1]. Endmembers are mean-subtracted prior to spectral angle calculation. 
Results are normalized such that the maximum value of 1 corresponds to a perfect match (zero spectral angle). Arguments: `data` (:class:`numpy.ndarray` or :class:`spectral.Image`): An `MxNxB` image for which spectral angles will be calculated. `members` (:class:`numpy.ndarray`): `CxB` array of spectral endmembers. Returns: `MxNxC` array of MSAM scores with maximum value of 1 corresponding to a perfect match (zero spectral angle). Calculates the spectral angles between each vector in data and each of the endmembers. The output of this function (angles) can be used to classify the data by minimum spectral angle by calling argmax(angles). References: [1] Shoko Oshigami, Yasushi Yamaguchi, Tatsumi Uezato, Atsushi Momose, Yessy Arvelyna, Yuu Kawakami, Taro Yajima, Shuichi Miyatake, and Anna Nguno. 2013. Mineralogical mapping of southern Namibia by application of continuum-removal MSAM method to the HyMap data. Int. J. Remote Sens. 34, 15 (August 2013), 5282-5295. ''' # The modifications to the `spectral_angles` function were contributed by # Christian Mielke. assert members.shape[1] == data.shape[2], \ 'Matrix dimensions are not aligned.' (M, N, B) = data.shape m = np.array(members, np.float64) C = m.shape[0] # Normalize endmembers for i in range(C): # Fisher z trafo type operation m[i] -= np.mean(m[i]) m[i] /= np.sqrt(m[i].dot(m[i])) angles = np.zeros((M, N, C), np.float64) for i in range(M): for j in range(N): #Fisher z trafo type operation v = data[i, j] - np.mean(data[i, j]) v /= np.sqrt(v.dot(v)) v = np.clip(v, -1, 1) for k in range(C): # Calculate Mineral Index according to Oshigami et al. # (Intnl. J. of Remote Sens. 2013) a = np.clip(v.dot(m[k]), -1, 1) angles[i,j,k]= 1.0 - np.arccos(a) / (math.pi / 2) return angles def noise_from_diffs(X, direction='lowerright'): '''Estimates noise statistcs by taking differences of adjacent pixels. Arguments: `X` (np.ndarray): The data from which to estimage noise statistics. `X` should have shape `(nrows, ncols, nbands`). `direction` (str, default "lowerright"): The pixel direction along which to calculate pixel differences. Must be one of the following: 'lowerright': Take difference with pixel diagonally to lower right 'lowerleft': Take difference with pixel diagonally to lower right 'right': Take difference with pixel to the right 'lower': Take differenece with pixel below Returns a :class:`~spectral.algorithms.algorithms.GaussianStats` object. ''' if direction.lower() not in ['lowerright', 'lowerleft', 'right', 'lower']: raise ValueError('Invalid `direction` value.') if direction == 'lowerright': deltas = X[:-1, :-1, :] - X[1:, 1:, :] elif direction == 'lowerleft': deltas = X[:-1, 1:, :] - X[1:, :-1, :] elif direction == 'right': deltas = X[:, :-1, :] - X[:, 1:, :] else: deltas = X[:-1, :, :] - X[1:, :, :] stats = calc_stats(deltas) stats.cov /= 2.0 return stats class MNFResult(object): '''Result object returned by :func:`~spectral.algorithms.algorithms.mnf`. This object contains data associates with a Minimum Noise Fraction calculation, including signal and noise statistics, as well as the Noise-Adjusted Principal Components (NAPC). This object can be used to denoise image data or to reduce its dimensionality. 
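    Example (a minimal sketch, assuming `data` is an `MxNxB` ndarray of image
    data and that `signal` and `noise` are `GaussianStats` estimated as shown
    in the :func:`mnf` docstring below):

    >>> mnfr = mnf(signal, noise)
    >>> denoised = mnfr.denoise(data, snr=10)  # de-noise at full dimensionality
    >>> reduced = mnfr.reduce(data, num=50)    # keep the top 50 NAPC components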
''' def __init__(self, signal, noise, napc): ''' Arguments: `signal` (:class:`~spectral.GaussianStats`): Signal statistics `noise` (:class:`~spectral.GaussianStats`): Noise statistics `napc` (:class:`~spectral.PrincipalComponents`): Noise-Adjusted Pricipal Components ''' self.signal = signal self.noise = noise self.napc = napc def _num_from_kwargs(self, **kwargs): '''Returns number of components to retain for the given kwargs.''' for key in kwargs: if key not in ('num', 'snr'): raise Exception('Keyword not recognized.') num = kwargs.get('num', None) snr = kwargs.get('snr', None) if num == snr == None: raise Exception('Must specify either `num` or `snr` keyword.') if None not in (num, snr): raise Exception('Can not specify both `num` and `snr` keywords.') if snr is not None: num = self.num_with_snr(snr) return num def denoise(self, X, **kwargs): '''Returns a de-noised version of `X`. Arguments: `X` (np.ndarray): Data to be de-noised. Can be a single pixel or an image. One (and only one) of the following keywords must be specified: `num` (int): Number of Noise-Adjusted Principal Components to retain. `snr` (float): Threshold signal-to-noise ratio (SNR) to retain. Returns denoised image data with same shape as `X`. Note that calling this method is equivalent to calling the `get_denoising_transform` method with same keyword and applying the returned transform to `X`. If you only intend to denoise data with the same parameters multiple times, then it is more efficient to get the denoising transform and reuse it, rather than calling this method multilple times. ''' f = self.get_denoising_transform(**kwargs) return f(X) def get_denoising_transform(self, **kwargs): '''Returns a function for denoising image data. One (and only one) of the following keywords must be specified: `num` (int): Number of Noise-Adjusted Principal Components to retain. `snr` (float): Threshold signal-to-noise ratio (SNR) to retain. Returns a callable :class:`~spectral.algorithms.transforms.LinearTransform` object for denoising image data. ''' N = self._num_from_kwargs(**kwargs) V = self.napc.eigenvectors Vr = np.array(V) Vr[:, N:] = 0. f = LinearTransform(self.noise.sqrt_cov.dot(Vr).dot(V.T) \ .dot(self.noise.sqrt_inv_cov), pre=-self.signal.mean, post=self.signal.mean) return f def reduce(self, X, **kwargs): '''Reduces dimensionality of image data. Arguments: `X` (np.ndarray): Data to be reduced. Can be a single pixel or an image. One (and only one) of the following keywords must be specified: `num` (int): Number of Noise-Adjusted Principal Components to retain. `snr` (float): Threshold signal-to-noise ratio (SNR) to retain. Returns a verions of `X` with reduced dimensionality. Note that calling this method is equivalent to calling the `get_reduction_transform` method with same keyword and applying the returned transform to `X`. If you intend to denoise data with the same parameters multiple times, then it is more efficient to get the reduction transform and reuse it, rather than calling this method multilple times. ''' f = self.get_reduction_transform(**kwargs) return f(X) def get_reduction_transform(self, **kwargs): '''Reduces dimensionality of image data. One (and only one) of the following keywords must be specified: `num` (int): Number of Noise-Adjusted Principal Components to retain. `snr` (float): Threshold signal-to-noise ratio (SNR) to retain. Returns a callable :class:`~spectral.algorithms.transforms.LinearTransform` object for reducing the dimensionality of image data. 
''' N = self._num_from_kwargs(**kwargs) V = self.napc.eigenvectors f = LinearTransform(V[:, :N].T.dot(self.noise.sqrt_inv_cov), pre=-self.signal.mean) return f def num_with_snr(self, snr): '''Returns the number of components with SNR >= `snr`.''' return np.sum(self.napc.eigenvalues >= (snr + 1)) def mnf(signal, noise): '''Computes Minimum Noise Fraction / Noise-Adjusted Principal Components. Arguments: `signal` (:class:`~spectral.algorithms.algorithms.GaussianStats`): Estimated signal statistics `noise` (:class:`~spectral.algorithms.algorithms.GaussianStats`): Estimated noise statistics Returns an :class:`~spectral.algorithms.algorithms.MNFResult` object, containing the Noise-Adjusted Principal Components (NAPC) and methods for denoising or reducing dimensionality of associated data. The Minimum Noise Fraction (MNF) is similar to the Principal Components transformation with the difference that the Principal Components associated with the MNF are ordered by descending signal-to-noise ratio (SNR) rather than overall image variance. Note that the eigenvalues of the NAPC are equal to one plus the SNR in the transformed space (since noise has whitened unit variance in the NAPC coordinate space). Example: >>> data = open_image('92AV3C.lan').load() >>> signal = calc_stats(data) >>> noise = noise_from_diffs(data[117: 137, 85: 122, :]) >>> mnfr = mnf(signal, noise) >>> # De-noise the data by eliminating NAPC components where SNR < 10. >>> # The de-noised data will be in the original coordinate space (at >>> # full dimensionality). >>> denoised = mnfr.denoise(data, snr=10) >>> # Reduce dimensionality, retaining NAPC components where SNR >= 10. >>> reduced = mnfr.reduce(data, snr=10) >>> # Reduce dimensionality, retaining top 50 NAPC components. >>> reduced = mnfr.reduce(data, num=50) References: Lee, James B., A. Stephen Woodyatt, and Mark Berman. "Enhancement of high spectral resolution remote-sensing data by a noise-adjusted principal components transform." Geoscience and Remote Sensing, IEEE Transactions on 28.3 (1990): 295-304. ''' C = noise.sqrt_inv_cov.dot(signal.cov).dot(noise.sqrt_inv_cov) (L, V) = np.linalg.eig(C) # numpy says eigenvalues may not be sorted so we'll sort them, if needed. if not np.alltrue(np.diff(L) <= 0): ii = list(reversed(np.argsort(L))) L = L[ii] V = V[:, ii] wstats = GaussianStats(mean=np.zeros_like(L), cov=C) napc = PrincipalComponents(L, V, wstats) return MNFResult(signal, noise, napc) def ppi(X, niters, threshold=0, centered=False, start=None, display=0, **imshow_kwargs): '''Returns pixel purity indices for an image. Arguments: `X` (ndarray): Image data for which to calculate pixel purity indices `niters` (int): Number of iterations to perform. Each iteration corresponds to a projection of the image data onto a random unit vector. `threshold` (numeric): If this value is zero, only the two most extreme pixels will have their indices incremented for each random vector. If the value is greater than zero, then all pixels whose projections onto the random vector are with `threshold` data units of either of the two extreme pixels will also have their indices incremented. `centered` (bool): If True, then the pixels in X are assumed to have their mean already subtracted; otherwise, the mean of `X` will be computed and subtracted prior to computing the purity indices. `start` (ndarray): An optional array of initial purity indices. 
This can be used to continue computing PPI values after a previous call to `ppi` (i.e., set `start` equal to the return value from a previou call to `ppi`. This should be an integer-valued array whose dimensions are equal to the first two dimensions of `X`. `display` (integer): If set to a postive integer, a :class:`~spectral.graphics.spypylab.ImageView` window will be opened and dynamically display PPI values as the function iterates. The value specifies the number of PPI iterations between display updates. It is recommended to use a value around 100 or higher. If the `stretch` keyword (see :func:`~spectral.graphics.graphics.get_rgb` for meaning) is not provided, a default stretch of (0.99, 0.999) is used. Return value: An ndarray of integers that represent the pixel purity indices of the input image. The return array will have dimensions equal to the first two dimensions of the input image. Keyword Arguments: Any keyword accepted by :func:`~spectral.graphics.spypylab.imshow`. These keywords will be passed to the image display and only have an effect if the `display` argument is nonzero. This function can be interruped with a KeyboardInterrupt (ctrl-C), in which case, the most recent value of the PPI array will be returned. This can be used in conjunction with the `display` argument to view the progression of the PPI values until they appear stable, then terminate iteration using ctrl-C. References: Boardman J.W., Kruse F.A, and Green R.O., "Mapping Target Signatures via Partial Unmixing of AVIRIS Data," Pasadena, California, USA, 23 Jan 1995, URI: http://hdl.handle.net/2014/33635 ''' if display is not None: if not isinstance(display, Integral) or isinstance(display, bool) or \ display < 0: msg = '`display` argument must be a non-negative integer.' raise ValueError(msg) if not centered: stats = calc_stats(X) X = X - stats.mean shape = X.shape X = X.reshape(-1, X.shape[-1]) nbands = X.shape[-1] fig = None updating = False if start is not None: counts = np.array(start.ravel()) else: counts = np.zeros(X.shape[0], dtype=np.uint32) if 'stretch' not in imshow_kwargs: imshow_kwargs['stretch'] = (0.99, 0.999) msg = 'Running {0} pixel purity iterations...'.format(niters) spy._status.display_percentage(msg) try: for i in range(niters): r = np.random.rand(nbands) - 0.5 r /= np.sqrt(np.sum(r * r)) s = X.dot(r) imin = np.argmin(s) imax = np.argmax(s) updating = True if threshold == 0: # Only the two extreme pixels are incremented counts[imin] += 1 counts[imax] += 1 else: # All pixels within threshold distance from the two extremes counts[s >= (s[imax] - threshold)] += 1 counts[s <= (s[imin] + threshold)] += 1 updating = False if display > 0 and (i + 1) % display == 0: if fig is not None: fig.set_data(counts.reshape(shape[:2]), **imshow_kwargs) else: fig = spy.imshow(counts.reshape(shape[:2]), **imshow_kwargs) fig.set_title('PPI ({} iterations)'.format(i + 1)) if not (i + 1) % 10: spy._status.update_percentage(100 * (i + 1) / niters) except KeyboardInterrupt: spy._status.end_percentage('interrupted') if not updating: msg = 'KeyboardInterrupt received. Returning pixel purity ' \ 'values after {0} iterations.'.format(i) spy._status.write(msg) return counts.reshape(shape[:2]) else: msg = 'KeyboardInterrupt received during array update. PPI ' \ 'values may be corrupt. 
Returning None' spy._status.write(msg) return None spy._status.end_percentage() return counts.reshape(shape[:2]) def smacc(spectra, min_endmembers=None, max_residual_norm=float('Inf')): '''Returns SMACC decomposition (H = F * S + R) matrices for an image or array of spectra. Let `H` be matrix of shape NxB, where B is number of bands, and N number of spectra, then if `spectra` is of the same shape, `H` will be equal to `spectra`. Otherwise, `spectra` is assumed to be 3D spectral image, and it is reshaped to match shape of `H`. Arguments: `spectra` (ndarray): Image data for which to calculate SMACC decomposition matrices. `min_endmembers` (int): Minimal number of endmembers to find. Defaults to rank of `H`, computed numerically with `numpy.linalg.matrix_rank`. `max_residual_norm`: Maximum value of residual vectors' norms. Algorithm will keep finding new endmembers until max value of residual norms is less than this argument. Defaults to float('Inf') Returns: 3 matrices, S, F and R, such that H = F * S + R (but it might not always hold). F is matrix of expansion coefficients of shape N x num_endmembers. All values of F are equal to, or greater than zero. S is matrix of endmember spectra, extreme vectors, of shape num_endmembers x B. R is matrix of residuals of same shape as H (N x B). If values of H are large (few tousands), H = F * S + R, might not hold, because of numeric errors. It is advisable to scale numbers, by dividing by 10000, for example. Depending on how accurate you want it to be, you can check if H is really strictly equal to F * S + R, and adjust R: R = H - np.matmul(F, S). References: John H. Gruninger, Anthony J. Ratkowski, and Michael L. Hoke "The sequential maximum angle convex cone (SMACC) endmember model", Proc. SPIE 5425, Algorithms and Technologies for Multispectral, Hyperspectral, and Ultraspectral Imagery X, (12 August 2004); https://doi.org/10.1117/12.543794 ''' # Indices of vectors in S. q = [] H = spectra if len(spectra.shape) == 2 else spectra.reshape( (spectra.shape[0] * spectra.shape[1], spectra.shape[2])) R = H Fs = [] F = None S = None if min_endmembers is None: min_endmembers = np.linalg.matrix_rank(H) # Add the longest vector to q. residual_norms = np.sqrt(np.einsum('ij,ij->i', H, H)) current_max_residual_norm = np.max(residual_norms) if max_residual_norm is None: max_residual_norm = current_max_residual_norm / min_endmembers while len(q) < min_endmembers or current_max_residual_norm > max_residual_norm: q.append(np.argmax(residual_norms)) n = len(q) - 1 # Current basis vector. w = R[q[n]] # Temporary to be used for projection calculation. wt = w / (np.dot(w, w)) # Calculate projection coefficients. On = np.dot(R, wt) alpha = np.ones(On.shape, dtype=np.float64) # Make corrections to satisfy convex cone conditions. # First correct alphas for oblique projection when needed. for k in range(len(Fs)): t = On * Fs[k][q[n]] # This is not so important for the algorithm itself. # These values correpond to values where On == 0.0, and these # will be zeroed out below. But to avoid divide-by-zero warning # we set small values instead of zero. t[t == 0.0] = 1e-10 np.minimum(Fs[k]/t, alpha, out=alpha) # Clip negative projection coefficients. alpha[On <= 0.0] = 0.0 # Current extreme vector should always be removed completely. alpha[q[n]] = 1.0 # Calculate oblique projection coefficients. Fn = alpha * On # Correction for numerical stability. Fn[Fn <= 0.0] = 0.0 # Remove projection to current basis from R. R = R - np.outer(Fn, w) # Update projection coefficients. 
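        # In more detail: each array in Fs holds the expansion coefficients of
        # every spectrum onto one previously selected extreme vector. Removing
        # the projection onto the new basis vector w changes those
        # coefficients, so each earlier Fs[k] is reduced by the new
        # coefficients Fn scaled by Fs[k][q[n]] (the coefficient of the new
        # endmember with respect to basis vector k) and then clipped at zero.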
for k in range(len(Fs)): Fs[k] -= Fs[k][q[n]] * Fn # Correction because of numerical problems. Fs[k][Fs[k] <= 0.0] = 0.0 # Add new Fn. Fs.append(Fn) residual_norms[:] = np.sqrt(np.einsum('ij,ij->i', R, R)) current_max_residual_norm = np.max(residual_norms) print('Found {0} endmembers, current max residual norm is {1:.4f}\r' .format(len(q), current_max_residual_norm), end='') # Correction as suggested in the SMACC paper. for k, s in enumerate(q): Fs[k][q] = 0.0 Fs[k][s] = 1.0 F = np.array(Fs).T S = H[q] # H = F * S + R return S, F, R spectral-0.22.4/spectral/algorithms/classifiers.py000066400000000000000000000400201412674721200222650ustar00rootroot00000000000000''' Supervised classifiers and base class for all classifiers. ''' from __future__ import absolute_import, division, print_function, unicode_literals import logging import math import numpy import numpy as np from warnings import warn import spectral as spy from .algorithms import GaussianStats, ImageIterator from .detectors import RX from .perceptron import Perceptron __all__ = ('GaussianClassifier', 'MahalanobisDistanceClassifier', 'PerceptronClassifier') class Classifier(object): ''' Base class for Classifiers. Child classes must implement the classify_spectrum method. ''' # It is often faster to compute the detector/classifier scores for the # entire image for each class, rather than for each class on a per-pixel # basis. However, this significantly increases memory requirements. If # the following parameter is True, class scores will be computed for the # entire image. cache_class_scores = True def __init__(self): pass def classify_spectrum(self, *args, **kwargs): raise NotImplementedError('Classifier.classify_spectrum must be ' 'overridden by a child class.') def classify_image(self, image): '''Classifies an entire image, returning a classification map. Arguments: `image` (ndarray or :class:`spectral.Image`) The `MxNxB` image to classify. Returns (ndarray): An `MxN` ndarray of integers specifying the class for each pixel. ''' status = spy._status status.display_percentage('Classifying image...') it = ImageIterator(image) class_map = np.zeros(image.shape[:2], np.int16) N = it.get_num_elements() i, inc = (0, N / 100) for spectrum in it: class_map[it.row, it.col] = self.classify_spectrum(spectrum) i += 1 if not i % inc: status.update_percentage(float(i) / N * 100.) status.end_percentage() return class_map def classify(self, X, **kwargs): if X.ndim == 1: return self.classify_spectrum(X, **kwargs) else: return self.classify_image(X, **kwargs) class SupervisedClassifier(Classifier): def __init__(self): pass def train(self): pass class GaussianClassifier(SupervisedClassifier): '''A Gaussian Maximum Likelihood Classifier''' def __init__(self, training_data=None, min_samples=None): '''Creates the classifier and optionally trains it with training data. Arguments: `training_data` (:class:`~spectral.algorithms.TrainingClassSet`): The training classes on which to train the classifier. `min_samples` (int) [default None]: Minimum number of samples required from a training class to include it in the classifier. ''' if min_samples: self.min_samples = min_samples else: self.min_samples = None if training_data: self.train(training_data) def train(self, training_data): '''Trains the classifier on the given training data. Arguments: `training_data` (:class:`~spectral.algorithms.TrainingClassSet`): Data for the training classes. 
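        Example (a sketch, assuming `data` is an `MxNxB` image array and `gt`
        is an `MxN` array of ground-truth class indices):

        >>> classes = create_training_classes(data, gt)
        >>> gmlc = GaussianClassifier()
        >>> gmlc.train(classes)
        >>> clmap = gmlc.classify_image(data)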
''' logger = logging.getLogger('spectral') if not self.min_samples: # Set minimum number of samples to the number of bands in the image self.min_samples = training_data.nbands logger.info('Setting min samples to %d', self.min_samples) self.classes = [] for cl in training_data: if cl.size() >= self.min_samples: self.classes.append(cl) else: logger.warn('Omitting class %3d : only %d samples present', cl.index, cl.size()) for cl in self.classes: if not hasattr(cl, 'stats') or not cl.stats_valid(): cl.calc_stats() def classify_spectrum(self, x): ''' Classifies a pixel into one of the trained classes. Arguments: `x` (list or rank-1 ndarray): The unclassified spectrum. Returns: `classIndex` (int): The index for the :class:`~spectral.algorithms.TrainingClass` to which `x` is classified. ''' scores = np.empty(len(self.classes)) for (i, cl) in enumerate(self.classes): delta = (x - cl.stats.mean) scores[i] = math.log(cl.class_prob) - 0.5 * cl.stats.log_det_cov \ - 0.5 * delta.dot(cl.stats.inv_cov).dot(delta) return self.classes[np.argmax(scores)].index def classify_image(self, image): '''Classifies an entire image, returning a classification map. Arguments: `image` (ndarray or :class:`spectral.Image`) The `MxNxB` image to classify. Returns (ndarray): An `MxN` ndarray of integers specifying the class for each pixel. ''' if not (self.cache_class_scores and isinstance(image, np.ndarray)): return super(GaussianClassifier, self).classify_image(image) status = spy._status status.display_percentage('Processing...') shape = image.shape image = image.reshape(-1, shape[-1]) scores = np.empty((image.shape[0], len(self.classes)), np.float64) delta = np.empty_like(image, dtype=np.float64) # For some strange reason, creating Y with np.emtpy_like will sometimes # result in the following error when attempting an in-place np.dot: # ValueError: output array is not acceptable (must have the right # type, nr dimensions, and be a C-Array) # It appears that this may be happening when delta is not contiguous, # although it isn't clear why the alternate construction of Y below # does work. Y = np.empty_like(delta) for (i, c) in enumerate(self.classes): scalar = math.log(c.class_prob) - 0.5 * c.stats.log_det_cov delta = np.subtract(image, c.stats.mean, out=delta) try: Y = delta.dot(-0.5 * c.stats.inv_cov, out=Y) except: # Unable to output np.dot to existing array. Allocate new # storage instead. This will not affect results but may be # slower. Y = delta.dot(-0.5 * c.stats.inv_cov) scores[:, i] = np.einsum('ij,ij->i', Y, delta) scores[:, i] += scalar status.update_percentage(100. * (i + 1) / len(self.classes)) status.end_percentage() inds = np.array([c.index for c in self.classes], dtype=np.int16) mins = np.argmax(scores, axis=-1) return inds[mins].reshape(shape[:2]) class MahalanobisDistanceClassifier(GaussianClassifier): '''A Classifier using Mahalanobis distance for class discrimination''' def train(self, trainingData): '''Trains the classifier on the given training data. Arguments: `trainingData` (:class:`~spectral.algorithms.TrainingClassSet`): Data for the training classes. ''' GaussianClassifier.train(self, trainingData) covariance = numpy.zeros(self.classes[0].stats.cov.shape, numpy.float) nsamples = np.sum(cl.stats.nsamples for cl in self.classes) for cl in self.classes: covariance += (cl.stats.nsamples / float(nsamples)) * cl.stats.cov self.background = GaussianStats(cov=covariance) def classify_spectrum(self, x): ''' Classifies a pixel into one of the trained classes. 
Arguments: `x` (list or rank-1 ndarray): The unclassified spectrum. Returns: `classIndex` (int): The index for the :class:`~spectral.algorithms.TrainingClass` to which `x` is classified. ''' scores = np.empty(len(self.classes)) for (i, cl) in enumerate(self.classes): delta = (x - cl.stats.mean) scores[i] = delta.dot(self.background.inv_cov).dot(delta) return self.classes[np.argmin(scores)].index def classify_image(self, image): '''Classifies an entire image, returning a classification map. Arguments: `image` (ndarray or :class:`spectral.Image`) The `MxNxB` image to classify. Returns (ndarray): An `MxN` ndarray of integers specifying the class for each pixel. ''' if not (self.cache_class_scores and isinstance(image, np.ndarray)): return super(MahalanobisDistanceClassifier, self).classify_image(image) # We can cheat here and just compute RX scores for the image for each # class, keeping the background covariance constant and setting the # background mean to the mean of the particular class being evaluated. scores = np.empty(image.shape[:2] + (len(self.classes),), np.float64) status = spy._status status.display_percentage('Processing...') rx = RX() for (i, c) in enumerate(self.classes): self.background.mean = c.stats.mean rx.set_background(self.background) scores[:, :, i] = rx(image) status.update_percentage(100. * (i + 1) / len(self.classes)) status.end_percentage() inds = np.array([c.index for c in self.classes], np.int16) mins = np.argmin(scores, axis=-1) return inds[mins] class PerceptronClassifier(Perceptron, SupervisedClassifier): '''A multi-layer perceptron classifier with backpropagation learning. Multi-layer perceptrons often require many (i.e., thousands) of iterations through the traning data to converge on a solution. Therefore, it is not recommended to attempt training a network on full-dimensional hyperspectral data or even on a full set of image pixels. It is likely preferable to first train the network on a subset of the data, then retrain the network (starting with network weights from initial training) on the full data set. Example usage: Train an MLP with 20 samples from each training class after performing dimensionality reduction: >>> classes = create_training_classes(data, gt) >>> fld = linear_discriminant(classes) >>> xdata = fld.transform(data) >>> classes = create_training_classes(xdata, gt) >>> nfeatures = xdata.shape[-1] >>> nclasses = len(classes) >>> >>> p = PerceptronClassifier([nfeatures, 20, 8, nclasses]) >>> p.train(classes, 20, clip=0., accuracy=100., batch=1, >>> momentum=0.3, rate=0.3) >>> c = p.classify(xdata) ''' def train(self, training_data, samples_per_class=0, *args, **kwargs): '''Trains the Perceptron on the training data. Arguments: `training_data` (:class:`~spectral.TrainingClassSet`): Data for the training classes. `samples_per_class` (int): Maximum number of training observations to user from each class in `training_data`. If this argument is not provided, all training data is used. Keyword Arguments: `accuracy` (float): The percent training accuracy at which to terminate training, if the maximum number of iterations are not reached first. This value can be set greater than 100 to force a specified number of training iterations to be performed (e.g., to continue reducing the error term after 100% classification accuracy has been achieved. `rate` (float): The perceptron learning rate (typically in the range (0, 1]). 
`momentum` (float): The perceptron learning momentum term, which specifies the fraction of the previous update value that should be added to the current update term. The value should be in the range [0, 1). `batch` (positive integer): Specifies how many samples should be evaluated before an update is made to the perceptron weights. A value of 0 indicates batch updates should be performed (evaluate all training inputs prior to updating). Otherwise, updates will be aggregated for every `batch` inputs (i.e., `batch` == 1 is stochastic learning). `clip` (float >= 0): Optional clipping value to limit sigmoid output during training. The sigmoid function has output in the range (0, 1). If the `clip` argument is set to `a` then all neuron outputs for the layer will be constrained to the range [a, 1 - a]. This can improve perceptron learning rate in some situations. After training the perceptron with a clipping value, `train` can be called again with clipping set to 0 to continue reducing the training error. `on_iteration` (callable): A callable object that accepts the perceptron as input and returns bool. If this argument is set, the object will be called at the end of each training iteration with the perceptron as its argument. If the callable returns True, training will terminate. `stdout`: An object with a `write` method that can be set to redirect training status messages somewhere other than stdout. To suppress output, set `stdout` to None. Return value: Returns True if desired accuracy was achieved. Neural networks can require many iterations through a data set to converge. If convergence slows (as indicated by small changes in residual error), training can be terminated by pressing CTRL-C, which will preserve the network weights from the previous training iteration. `train` can then be called again with altered training parameters (e.g., increased learning rate or momentum) to increase the convergence rate. ''' status = spy._status settings = spy.settings # Number of Perceptron inputs must equal number of features in the # training data. if len(training_data) != self.layers[-1].shape[0]: raise Exception('Number of nodes in output layer must match ' 'number of training classes.') self.training_data = training_data # Map output nodes to class indices self.indices = [cl.index for cl in self.training_data] class_data = [np.array([x for x in cl]) for cl in self.training_data] if samples_per_class > 0: for i in range(len(class_data)): if class_data[i].shape[0] > samples_per_class: class_data[i] = class_data[i][:samples_per_class] X = np.vstack(class_data) y = np.hstack([np.ones(c.shape[0], dtype=np.int16) * i for \ (i, c) in enumerate(class_data)]) Y = np.eye(np.max(y) + 1, dtype=np.int16)[y] if 'stdout' in kwargs: stdout = kwargs.pop('stdout') elif settings.show_progress is True: stdout = status else: stdout = None return Perceptron.train(self, X, Y, *args, stdout=stdout, **kwargs) def classify_spectrum(self, x): ''' Classifies a pixel into one of the trained classes. Arguments: `x` (list or rank-1 ndarray): The unclassified spectrum. Returns: `classIndex` (int): The index for the :class:`~spectral.TrainingClass` to which `x` is classified. ''' y = self.input(x) return self.indices[np.argmax(y)] def classify(self, X, **kwargs): return Classifier.classify(self, X, **kwargs) spectral-0.22.4/spectral/algorithms/clustering.py000066400000000000000000000312051412674721200221420ustar00rootroot00000000000000''' k-means clustering. 
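Example (a sketch, assuming `img` is an `MxNxB` ndarray of image data):

    >>> (class_map, centers) = kmeans(img, nclusters=20, max_iterations=30)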
''' from __future__ import absolute_import, division, print_function, unicode_literals import logging import numpy as np import spectral as spy from .classifiers import Classifier from ..utilities.errors import has_nan, NaNValueError def L1(v1, v2): 'Returns L1 distance between 2 rank-1 arrays.' return np.sum(abs((v1 - v2))) def L2(v1, v2): 'Returns Euclidean distance between 2 rank-1 arrays.' delta = v1 - v2 return np.sqrt(np.dot(delta, delta)) def kmeans(image, nclusters=10, max_iterations=20, **kwargs): ''' Performs iterative clustering using the k-means algorithm. Arguments: `image` (:class:`numpy.ndarray` or :class:`spectral.Image`): The `MxNxB` image on which to perform clustering. `nclusters` (int) [default 10]: Number of clusters to create. The number produced may be less than `nclusters`. `max_iterations` (int) [default 20]: Max number of iterations to perform. Keyword Arguments: `start_clusters` (:class:`numpy.ndarray`) [default None]: `nclusters x B` array of initial cluster centers. If not provided, initial cluster centers will be spaced evenly along the diagonal of the N-dimensional bounding box of the image data. `compare` (callable object) [default None]: Optional comparison function. `compare` must be a callable object that takes 2 `MxN` :class:`numpy.ndarray` objects as its arguments and returns non-zero when clustering is to be terminated. The two arguments are the cluster maps for the previous and current cluster cycle, respectively. `distance` (callable object) [default :func:`~spectral.clustering.L2`]: The distance measure to use for comparison. The default is to use **L2** (Euclidean) distance. For Manhattan distance, specify :func:`~spectral.clustering.L1`. `frames` (list) [default None]: If this argument is given and is a list object, each intermediate cluster map is appended to the list. Returns a 2-tuple containing: `class_map` (:class:`numpy.ndarray`): An `MxN` array whos values are the indices of the cluster for the corresponding element of `image`. `centers` (:class:`numpy.ndarray`): An `nclusters x B` array of cluster centers. Iterations are performed until clusters converge (no pixels reassigned between iterations), `maxIterations` is reached, or `compare` returns nonzero. If :exc:`KeyboardInterrupt` is generated (i.e., CTRL-C pressed) while the algorithm is executing, clusters are returned from the previously completed iteration. ''' logger = logging.getLogger('spectral') if isinstance(image, np.ndarray): return kmeans_ndarray(*(image, nclusters, max_iterations), **kwargs) status = spy._status # defaults for kwargs start_clusters = None compare = None distance = L2 iterations = None for (key, val) in list(kwargs.items()): if key == 'start_clusters': start_clusters = val elif key == 'compare': compare = val elif key == 'distance': if val in (L1, 'L1'): distance = L1 elif val in (L2, 'L2'): distance = L2 else: raise ValueError('Unrecognized keyword argument.') elif key == 'frames': if not hasattr(val, 'append'): raise TypeError('"frames" keyword argument must have "append"' 'attribute.') iterations = val else: raise NameError('Unsupported keyword argument.') (nrows, ncols, nbands) = image.shape clusters = np.zeros((nrows, ncols), int) old_clusters = np.copy(clusters) if start_clusters is not None: assert (start_clusters.shape[0] == nclusters), 'There must be \ nclusters clusters in the startCenters array.' 
centers = np.array(start_clusters) else: logging.debug('Initializing clusters along diagonal of N-dimensional bounding box.') centers = np.empty((nclusters, nbands), float) boxMin = image[0, 0] boxMax = image[0, 0] for i in range(nrows): for j in range(ncols): x = image[i, j] boxMin = np.where(boxMin < x, boxMin, x) boxMax = np.where(boxMax > x, boxMax, x) boxMin = boxMin.astype(float) boxMax = boxMax.astype(float) delta = (boxMax - boxMin) / (nclusters - 1) for i in range(nclusters): centers[i] = boxMin.astype(float) + i * delta itnum = 1 while (itnum <= max_iterations): try: status.display_percentage('Iteration %d...' % itnum) # Assign all pixels for i in range(nrows): status.update_percentage(float(i) / nrows * 100.) for j in range(ncols): minDist = 1.e30 for k in range(nclusters): dist = distance(image[i, j], centers[k]) if (dist < minDist): clusters[i, j] = k minDist = dist # Update cluster centers sums = np.zeros((nclusters, nbands), 'd') counts = ([0] * nclusters) for i in range(nrows): for j in range(ncols): counts[clusters[i, j]] += 1 sums[clusters[i, j]] += image[i, j] old_centers = centers[:] for i in range(nclusters): if (counts[i] > 0): centers[i] = sums[i] / counts[i] centers = np.array(centers) if iterations is not None: iterations.append(clusters) if compare and compare(old_clusters, clusters): status.end_percentage('done.') break else: nChanged = np.sum(clusters != old_clusters) if nChanged == 0: status.end_percentage('0 pixels reassigned.') break else: status.end_percentage('%d pixels reassigned.' \ % (nChanged)) old_clusters = clusters old_centers = centers clusters = np.zeros((nrows, ncols), int) itnum += 1 except KeyboardInterrupt: print("KeyboardInterrupt: Returning clusters from previous iteration") return (old_clusters, old_centers) logger.info('kmeans terminated with %d clusters after %d iterations', len(set(old_clusters.ravel())), itnum - 1) return (old_clusters, centers) def kmeans_ndarray(image, nclusters=10, max_iterations=20, **kwargs): ''' Performs iterative clustering using the k-means algorithm. Arguments: `image` (:class:`numpy.ndarray` or :class:`spectral.Image`): The `MxNxB` image on which to perform clustering. `nclusters` (int) [default 10]: Number of clusters to create. The number produced may be less than `nclusters`. `max_iterations` (int) [default 20]: Max number of iterations to perform. Keyword Arguments: `start_clusters` (:class:`numpy.ndarray`) [default None]: `nclusters x B` array of initial cluster centers. If not provided, initial cluster centers will be spaced evenly along the diagonal of the N-dimensional bounding box of the image data. `compare` (callable object) [default None]: Optional comparison function. `compare` must be a callable object that takes 2 `MxN` :class:`numpy.ndarray` objects as its arguments and returns non-zero when clustering is to be terminated. The two arguments are the cluster maps for the previous and current cluster cycle, respectively. `distance` (callable object) [default :func:`~spectral.clustering.L2`]: The distance measure to use for comparison. The default is to use **L2** (Euclidean) distance. For Manhattan distance, specify :func:`~spectral.clustering.L1`. `frames` (list) [default None]: If this argument is given and is a list object, each intermediate cluster map is appended to the list. Returns a 2-tuple containing: `class_map` (:class:`numpy.ndarray`): An `MxN` array whos values are the indices of the cluster for the corresponding element of `image`. 
`centers` (:class:`numpy.ndarray`): An `nclusters x B` array of cluster centers. Iterations are performed until clusters converge (no pixels reassigned between iterations), `max_iterations` is reached, or `compare` returns nonzero. If :exc:`KeyboardInterrupt` is generated (i.e., CTRL-C pressed) while the algorithm is executing, clusters are returned from the previously completed iteration. ''' logger = logging.getLogger('spectral') if has_nan(image): raise NaNValueError('Image data contains NaN values.') # defaults for kwargs start_clusters = None compare = None distance = L2 iterations = None for (key, val) in list(kwargs.items()): if key == 'start_clusters': start_clusters = val elif key == 'compare': compare = val elif key == 'distance': if val in (L1, 'L1'): distance = L1 elif val in (L2, 'L2'): distance = L2 else: raise ValueError('Unrecognized keyword argument.') elif key == 'frames': if not hasattr(val, 'append'): raise TypeError('"frames" keyword argument must have "append"' 'attribute.') iterations = val else: raise NameError('Unsupported keyword argument.') (nrows, ncols, nbands) = image.shape N = nrows * ncols image = image.reshape((N, nbands)) clusters = np.zeros((N,), int) if start_clusters is not None: assert (start_clusters.shape[0] == nclusters), 'There must be \ nclusters clusters in the startCenters array.' centers = np.array(start_clusters) else: logger.debug('Initializing clusters along diagonal of N-dimensional' \ ' bounding box.') boxMin = np.amin(image, 0) boxMax = np.amax(image, 0) delta = (boxMax - boxMin) / (nclusters - 1) centers = np.empty((nclusters, nbands), float) for i in range(nclusters): centers[i] = boxMin + i * delta distances = np.empty((N, nclusters), float) old_centers = np.array(centers) clusters = np.zeros((N,), int) old_clusters = np.copy(clusters) diffs = np.empty_like(image, dtype=np.float64) itnum = 1 while (itnum <= max_iterations): try: # Assign all pixels for i in range(nclusters): diffs = np.subtract(image, centers[i], out=diffs) if distance == L2: distances[:, i] = np.einsum('ij,ij->i', diffs, diffs) else: diffs = np.abs(diffs, out=diffs) distances[:, i] = np.einsum('ij->i', diffs) clusters[:] = np.argmin(distances, 1) # Update cluster centers old_centers[:] = centers for i in range(nclusters): inds = np.argwhere(clusters == i)[:, 0] if len(inds) > 0: centers[i] = np.mean(image[inds], 0, float) if iterations is not None: iterations.append(clusters.reshape(nrows, ncols)) if compare and compare(old_clusters, clusters): break else: nChanged = np.sum(clusters != old_clusters) logger.info('k-means iteration {} - {} pixels reassigned.' \ .format(itnum, nChanged)) if nChanged == 0: break old_clusters[:] = clusters old_centers[:] = centers itnum += 1 except KeyboardInterrupt: print("KeyboardInterrupt: Returning clusters from previous iteration.") return (old_clusters.reshape(nrows, ncols), old_centers) logger.info('kmeans terminated with %d clusters after %d iterations.', len(set(old_clusters.ravel())), itnum - 1) return (old_clusters.reshape(nrows, ncols), centers) spectral-0.22.4/spectral/algorithms/continuum.py000066400000000000000000000345261412674721200220150ustar00rootroot00000000000000''' Continuum and continuum removal. Continuum is defined as convex hull of spectrum. Continuum is removed from spectra by dividing spectra by its continuum. That results in values between 0 and 1, where absorption bands are expressed as drops below 1. It is usefull for comparing and classification based on absorption bands and indifferent to scale. 
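Example (a sketch, assuming `data` is an `MxNxB` ndarray of floating-point
reflectance values and `bands` is a 1-d array of band-center values sorted in
ascending order):

    >>> continua = spectral_continuum(data, bands)
    >>> cr = remove_continuum(data, bands)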
References: Clark, R.N. and Roush, L. (1984) Reflectance Spectroscopy Quantitative Analysis Techniques for Remote Sensing Applications. Journal of Geophysical Research, 89, 6329-6340. http://dx.doi.org/10.1029/JB089iB07p06329 Jiwei Bai, et al., "Classification methods of the hyperspectralimage based on the continuum-removed," Proc. SPIE 4897, Multispectral and Hyperspectral Remote Sensing Instruments and Applications, (16 June 2003); doi: 10.1117/12.466729 Lehnert, Lukas & Meyer, Hanna & Obermeier, Wolfgang & Silva, Brenner & Regeling, Bianca & Thies, Boris & Bendix, Jorg. (2019). Hyperspectral Data Analysis in R: The hsdar Package. Journal of statistical software. 89. 1-23. 10.18637/jss.v089.i12. ''' from __future__ import absolute_import, division, print_function, unicode_literals import logging import numpy as np import spectral as spy from ..utilities.errors import has_nan, NaNValueError def _segment_concave_region(spectrum, bands, indices, ind_fill, ibegin, iend): # Here we don't search for local maxima w.r.t. line that connects ends of this region. # That is behavior of the hsdar. It also makes more sense in the context of # hyperspectral image analysis. We are already not convex, and we can't # include all points that make result quasi-convex, since there will be too # many of them, often right one after another. However, filtering local maxima, # below, will make result quasi-convex. # Notice that we are using >=, not strict >. That will include maxima that # are flat, that stretch few points. It will also include local minima, # but just as with local maxima that are too low below slope line, these # will be filtered out. is_maximum = np.logical_and(spectrum[ibegin+1:iend-1] >= spectrum[ibegin:iend-2], spectrum[ibegin+1:iend-1] >= spectrum[ibegin+2:iend]) # Get local maxima indices. (Note that where return tuple for each dimension). lmi = np.where(is_maximum)[0] # No local maxima, return. if len(lmi) == 0: return ind_fill # Make it relative to input array - spectrum. lmi += ibegin + 1 # Get local maxima. local_maxima = spectrum[lmi] # Filter those maxima that cause slope between them to change direction. # This makes remaining maxima, satisfy quasy-convexity condition. slope_dir = spectrum[iend-1] - spectrum[ibegin] filtered_indices = [] if slope_dir >= 0.0: last_included_value = spectrum[ibegin] for i in range(len(local_maxima)): lm = local_maxima[i] if lm > last_included_value: filtered_indices.append(lmi[i]) last_included_value = lm else: # Slope is negative. Start from back. last_included_value = spectrum[iend-1] for i in range(len(local_maxima) - 1, -1, -1): lm = local_maxima[i] if lm > last_included_value: filtered_indices.append(lmi[i]) last_included_value = lm filtered_indices.reverse() # Take only valid local maxima indices. lmi = filtered_indices # If there is no valid local maxima indices, return. if len(lmi) == 0: return ind_fill # Add indices to result, and process subregions between them with convex hull # algorithm, to make sure all input points and below resulting hull. next_ibegin = ibegin for i in lmi: # There should be at least 1 point between edges, to call _find_indices_in_range. # However, these are to local maxima, and if there is one point between them, # it must be below both. So only for two points inside region borders # call _find_indices_in_range. if i > next_ibegin + 2: # Put hull around points in subregion. 
ind_fill = _find_indices_in_range( spectrum, bands, False, indices, ind_fill, next_ibegin, i + 1) indices[ind_fill] = i ind_fill += 1 next_ibegin = i # Don't miss the last range. ind_fill = _find_indices_in_range( spectrum, bands, False, indices, ind_fill, lmi[-1], iend) return ind_fill def _find_indices_in_range(spectrum, bands, segmented, indices, ind_fill, ibegin, iend): iendi = iend - 1 # We search for maximum, but not from the x axis. # We search for maximum w.r.t to axis represented by line connecting # first and last point (of this iteration). # First find normal to new axis. Swap x and y, and negate new x. # If we negate x instead of y, normal will always point upward. naxis_y = bands[iendi] - bands[ibegin] naxis_x = spectrum[ibegin] - spectrum[iendi] # Don't literally compute distance from the axis. Rather, calculate dot products # of points with the normal, and find the largest. The largest dot product (it does not have to be positive) # is the one that goes more in the direction of normal than others. To get the distance, # we could divide each dot product by norm/length of the normal. But that is constant, # and does not effect which one is maximum. # Note that here we include first point of the range, but not last. imax = np.argmax(bands[ibegin:iendi] * naxis_x + spectrum[ibegin:iendi] * naxis_y) + ibegin # If first point is maximum, then all others are "below" the axis, # which means this is concave region. if imax == ibegin: # If we are in segmented upper hull mode, then segment concave region. # For that to make sense, we need at least 3 elements between edges. if segmented and iend - ibegin > 5: ind_fill = _segment_concave_region( spectrum, bands, indices, ind_fill, ibegin, iend) return ind_fill # Repeat same procedure on the left side, if there are enough points left. # At least 1 is required between first and last point in range. if imax > ibegin + 1: ind_fill = _find_indices_in_range( spectrum, bands, segmented, indices, ind_fill, ibegin, imax + 1) # Push middle index. indices[ind_fill] = imax ind_fill += 1 # Repeat for the right side. if imax < iend - 2: ind_fill = _find_indices_in_range( spectrum, bands, segmented, indices, ind_fill, imax, iend) return ind_fill def _find_continuum_points_recursive(spectrum, bands, segmented, indices): n = len(spectrum) indices[0] = 0 ind_fill = 1 ind_fill = _find_indices_in_range( spectrum, bands, segmented, indices, ind_fill, 0, n) indices[ind_fill] = n - 1 indices = indices[:ind_fill + 1] return (bands[indices], spectrum[indices]) def _process_continuum(spectra, bands, remove_continuum, segmented, out): if not isinstance(spectra, np.ndarray): raise TypeError('Expected spectra to be a numpy.ndarray.') if not isinstance(bands, np.ndarray): raise TypeError('Expected bands to be a numpy.ndarray.') if out is not None and not isinstance(out, np.ndarray): raise TypeError('Expected out to be a numpy.ndarray or None.') if len(spectra.shape) not in (1, 2, 3): raise ValueError('Expected spectra to be 1d, 2d, or 3d array.') if len(bands.shape) != 1: raise ValueError('Expected bands to be 1d array.') if out is not None and not np.array_equal(out.shape, spectra.shape): raise ValueError('Expected out to be same shape as spectra.') out = np.empty_like(spectra) if out is None else out # In case we remove continuum, always divide out by continuum, # to avoid creating additional temporary array. 
if spectra is not out and remove_continuum: out[:] = spectra[:] original_shape = spectra.shape nbands = original_shape[-1] interp = np.interp indices = np.empty(nbands, np.int64) if len(spectra.shape) == 1: points = _find_continuum_points_recursive( spectra, bands, segmented, indices) continuum = interp(bands, points[0], points[1]) if remove_continuum: out /= continuum else: out[:] = continuum elif len(spectra.shape) == 2: for i in range(spectra.shape[0]): points = _find_continuum_points_recursive( spectra[i], bands, segmented, indices) continuum = interp(bands, points[0], points[1]) if remove_continuum: out[i, :] /= continuum else: out[i, :] = continuum else: for i in range(spectra.shape[0]): for j in range(spectra.shape[1]): points = _find_continuum_points_recursive( spectra[i, j], bands, segmented, indices) continuum = interp(bands, points[0], points[1]) if remove_continuum: out[i, j, :] /= continuum else: out[i, j, :] = continuum return out def continuum_points(spectrum, bands, mode='convex'): '''Returns points of spectra that belong to it's continuum. Arguments: `spectrum` (:class:`numpy.ndarray`) 1d :class:`numpy.ndarray` holding spectral signature. `bands` (:class:`numpy.ndarray`): 1d :class:`numpy.ndarray`, holding band values of spectra. Length of `bands` should be the same as `spectrum`. Note that bands should be sorted in ascending order (which is often not the case with AVIRIS), otherwise unexpected results could occure. `mode` (string, default 'convex'): Default mode is 'convex' which returns convex upper hull of the spectrum. Another supported mode is 'segmented' which builds segmented upper hull. This is usefull to identify more detailed contour of the spectrum, but without strong absorption bands. Returns: 2-tuple, with each element being :class:`numpy.ndarray`. First element contains reflectance values of points that belong to continuum. Second element contains corresponding bands. By applying linear interpolation to this data as x and y, we get continuum of spectrum. However this function is particularly useful to applying other interpolations or any other processing on these points. ''' if not isinstance(spectrum, np.ndarray): raise TypeError('Expected spectra to be a numpy.ndarray.') if not isinstance(bands, np.ndarray): raise TypeError('Expected bands to be a numpy.ndarray.') if len(spectrum.shape) != 1: raise ValueError('Expected spectra to be 1d array.') if len(bands.shape) != 1: raise ValueError('Expected bands to be 1d array.') indices = np.empty_like(spectrum, dtype='int64') return _find_continuum_points_recursive(spectrum, bands, mode == 'segmented', indices) def spectral_continuum(spectra, bands, mode='convex', out=None): '''Returns continua of spectra. Continuum is defined as convex hull of spectra. Arguments: `spectra` (:class:`numpy.ndarray`) Can be 1d, 2d or 3d :class:`numpy.ndarray`, where last dimension holds individual spectra. `bands` (:class:`numpy.ndarray`): 1d :class:`numpy.ndarray`, holding band values of spectra. Length of `bands` should be the same as last dimension of `spectra`. Note that bands should be sorted in ascending order (which is often not the case with AVIRIS), otherwise unexpected results could occure. `mode` (string, default 'convex'): Default mode is 'convex' which returns convex upper hull of the spectrum. Another supported mode is 'segmented' which builds segmented upper hull. This is usefull to identify more detailed contour of the spectrum, but without strong absorption bands. 
`out` (:class:`numpy.ndarray`, default None): If provided, it must have same type and same shape as `spectra`, and it will hold the result, and will be returned as result of this function. Returns: A :class:`numpy.ndarray` of continua for each spectrum in spectra. It same type and shape as spectra. If `out` is provided, `out` will be returned. ''' return _process_continuum(spectra, bands, False, mode == 'segmented', out) def remove_continuum(spectra, bands, mode='convex', out=None): '''Returns spectra with continuum removed. Continuum is defined as convex hull of spectra. Continuum is removed from spectra by deviding spectra by its continuum. Arguments: `spectra` (:class:`numpy.ndarray`) Can be 1d, 2d or 3d :class:`numpy.ndarray`, where last dimension holds individual spectra. `bands` (:class:`numpy.ndarray`): 1d :class:`numpy.ndarray`, holding band values of spectra. Length of `bands` should be the same as last dimension of `spectra`. Note that bands should be sorted in ascending order (which is often not the case with AVIRIS), otherwise unexpected results could occure. `mode` (string, default 'convex'): Default mode is 'convex' which removes convex upper hull of the spectrum. Another supported mode is 'segmented' which removes segmented upper hull. This is usefull to identify two or more small features instead of one large feature. `out` (:class:`numpy.ndarray`, default None): If provided, it must have type `np.float64` and same shape as `spectra`, and it will hold the result, and will be returned as result of this function. Returns: A :class:`numpy.ndarray` of continua for in spectrum in spectra. It type `np.float64` and same shape as spectra. If `out` is provided, `out` will be returned. ''' if out is not None and out.dtype != np.float64: raise ValueError('Expected out to have dtype float64. ' 'Results of continuum removal are floating point numbers.') return _process_continuum(spectra, bands, True, mode == 'segmented', out) spectral-0.22.4/spectral/algorithms/detectors.py000066400000000000000000000635251412674721200217710ustar00rootroot00000000000000''' Spectral target detection algorithms. ''' from __future__ import absolute_import, division, print_function, unicode_literals __all__ = ['MatchedFilter', 'matched_filter', 'RX', 'rx', 'ace'] import math import numpy as np from .algorithms import calc_stats from .transforms import LinearTransform from .spatial import map_outer_window_stats from .spymath import matrix_sqrt class MatchedFilter(LinearTransform): r'''A callable linear matched filter. Given target/background means and a common covariance matrix, the matched filter response is given by: .. math:: y=\frac{(\mu_t-\mu_b)^T\Sigma^{-1}(x-\mu_b)}{(\mu_t-\mu_b)^T\Sigma^{-1}(\mu_t-\mu_b)} where :math:`\mu_t` is the target mean, :math:`\mu_b` is the background mean, and :math:`\Sigma` is the covariance. ''' def __init__(self, background, target): '''Creates the filter, given background/target means and covariance. Arguments: `background` (`GaussianStats`): The Gaussian statistics for the background (e.g., the result of calling :func:`calc_stats`). 
`target` (ndarray): Length-K target mean ''' self.background = background self.u_b = background.mean self.u_t = target self._whitening_transform = None d_tb = (target - self.u_b) self.d_tb = d_tb C_1 = background.inv_cov self.C_1 = C_1 # Normalization coefficient (inverse of squared Mahalanobis distance # between u_t and u_b) self.coef = 1.0 / d_tb.dot(C_1).dot(d_tb) LinearTransform.__init__( self, (self.coef * d_tb).dot(C_1), pre=-self.u_b) def whiten(self, X): '''Transforms data to the whitened space of the background. Arguments: `X` (ndarray): Size (M,N,K) or (M*N,K) array of length K vectors to transform. Returns an array of same size as `X` but linearly transformed to the whitened space of the filter. ''' if self._whitening_transform is None: A = math.sqrt(self.coef) * self.background.sqrt_inv_cov self._whitening_transform = LinearTransform(A, pre=-self.u_b) return self._whitening_transform(X) def matched_filter(X, target, background=None, window=None, cov=None): r'''Computes a linear matched filter target detector score. Usage: y = matched_filter(X, target, background) y = matched_filter(X, target, window= [, cov=]) Given target/background means and a common covariance matrix, the matched filter response is given by: .. math:: y=\frac{(\mu_t-\mu_b)^T\Sigma^{-1}(x-\mu_b)}{(\mu_t-\mu_b)^T\Sigma^{-1}(\mu_t-\mu_b)} where :math:`\mu_t` is the target mean, :math:`\mu_b` is the background mean, and :math:`\Sigma` is the covariance. Arguments: `X` (numpy.ndarray): For the first calling method shown, `X` can be an image with shape (R, C, B) or an ndarray of shape (R * C, B). If the `background` keyword is given, it will be used for the image background statistics; otherwise, background statistics will be computed from `X`. If the `window` keyword is given, `X` must be a 3-dimensional array and background statistics will be computed for each point in the image using a local window defined by the keyword. `target` (ndarray): Length-K vector specifying the target to be detected. `background` (`GaussianStats`): The Gaussian statistics for the background (e.g., the result of calling :func:`calc_stats` for an image). This argument is not required if `window` is given. `window` (2-tuple of odd integers): Must have the form (`inner`, `outer`), where the two values specify the widths (in pixels) of inner and outer windows centered about the pixel being evaulated. Both values must be odd integers. The background mean and covariance will be estimated from pixels in the outer window, excluding pixels within the inner window. For example, if (`inner`, `outer`) = (5, 21), then the number of pixels used to estimate background statistics will be :math:`21^2 - 5^2 = 416`. If this argument is given, `background` is not required (and will be ignored, if given). The window is modified near image borders, where full, centered windows cannot be created. The outer window will be shifted, as needed, to ensure that the outer window still has height and width `outer` (in this situation, the pixel being evaluated will not be at the center of the outer window). The inner window will be clipped, as needed, near image borders. For example, assume an image with 145 rows and columns. If the window used is (5, 21), then for the image pixel at (0, 0) (upper left corner), the the inner window will cover `image[:3, :3]` and the outer window will cover `image[:21, :21]`. For the pixel at (50, 1), the inner window will cover `image[48:53, :4]` and the outer window will cover `image[40:51, :21]`. 
`cov` (ndarray): An optional covariance to use. If this parameter is given, `cov` will be used for all matched filter calculations (background covariance will not be recomputed in each window) and only the background mean will be recomputed in each window. If the `window` argument is specified, providing `cov` will allow the result to be computed *much* faster. Returns numpy.ndarray: The return value will be the matched filter scores distance) for each pixel given. If `X` has shape (R, C, K), the returned ndarray will have shape (R, C). ''' if background is not None and window is not None: raise ValueError('`background` and `window` are mutually ' \ 'exclusive arguments.') if window is not None: def mf_wrapper(bg, x): return MatchedFilter(bg, target)(x) return map_outer_window_stats(mf_wrapper, X, window[0], window[1], dim_out=1, cov=cov) else: if background is None: background = calc_stats(X) return MatchedFilter(background, target)(X) class RX(): r'''An implementation of the RX anomaly detector. Given the mean and covariance of the background, this detector returns the squared Mahalanobis distance of a spectrum according to .. math:: y=(x-\mu_b)^T\Sigma^{-1}(x-\mu_b) where `x` is the unknown pixel spectrum, :math:`\mu_b` is the background mean, and :math:`\Sigma` is the background covariance. References: Reed, I.S. and Yu, X., "Adaptive multiple-band CFAR detection of an optical pattern with unknown spectral distribution," IEEE Trans. Acoust., Speech, Signal Processing, vol. 38, pp. 1760-1770, Oct. 1990. ''' dim_out=1 def __init__(self, background=None): '''Creates the detector, given optional background/target stats. Arguments: `background` (`GaussianStats`, default None): The Gaussian statistics for the background (e.g., the result of calling :func:`calc_stats`). If no background stats are provided, they will be estimated based on data passed to the detector. ''' if background is not None: self.set_background(background) else: self.background = None def set_background(self, stats): '''Sets background statistics to be used when applying the detector.''' self.background = stats def __call__(self, X): '''Applies the RX anomaly detector to X. Arguments: `X` (numpy.ndarray): For an image with shape (R, C, B), `X` can be a vector of length B (single pixel) or an ndarray of shape (R, C, B) or (R * C, B). Returns numpy.ndarray or float: The return value will be the RX detector score (squared Mahalanobis distance) for each pixel given. If `X` is a single pixel, a float will be returned; otherwise, the return value will be an ndarray of floats with one less dimension than the input. ''' if not isinstance(X, np.ndarray): raise TypeError('Expected a numpy.ndarray.') if self.background is None: self.set_background(calc_stats(X)) X = (X - self.background.mean) C_1 = self.background.inv_cov ndim = X.ndim shape = X.shape if ndim == 1: return X.dot(C_1).dot(X) if ndim == 3: X = X.reshape((-1, X.shape[-1])) A = X.dot(C_1) r = np.einsum('ij,ij->i', A, X) return r.reshape(shape[:-1]) # I tried using einsum for the above calculations but, surprisingly, # it was *much* slower than using dot & sum. Need to figure out if # that is due to multithreading or some other reason. 
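        # One way to check (an illustrative sketch, not part of the detector)
        # is to time both formulations on the same mean-subtracted data X and
        # inverse covariance C_1 used above, e.g.:
        #
        #     import timeit
        #     t_dot = timeit.timeit(
        #         lambda: np.einsum('ij,ij->i', X.dot(C_1), X), number=10)
        #     t_einsum = timeit.timeit(
        #         lambda: np.einsum('ik,km,im->i', X, C_1, X), number=10)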
# print 'ndim =', ndim # if ndim == 1: # return np.einsum('i,ij,j', X, self.background.inv_cov, X) # if ndim == 3: # return np.einsum('ijk,km,ijm->ij', # X, self.background.inv_cov, X).squeeze() # elif ndim == 2: # return np.einsum('ik,km,im->i', # X, self.background.inv_cov, X).squeeze() # else: # raise Exception('Unexpected number of dimensions.') # def rx(X, background=None, window=None, cov=None): r'''Computes RX anomaly detector scores. Usage: y = rx(X [, background=bg]) y = rx(X, window=(inner, outer) [, cov=C]) The RX anomaly detector produces a detection statistic equal to the squared Mahalanobis distance of a spectrum from a background distribution according to .. math:: y=(x-\mu_b)^T\Sigma^{-1}(x-\mu_b) where `x` is the pixel spectrum, :math:`\mu_b` is the background mean, and :math:`\Sigma` is the background covariance. Arguments: `X` (numpy.ndarray): For the first calling method shown, `X` can be an image with shape (R, C, B) or an ndarray of shape (R * C, B). If the `background` keyword is given, it will be used for the image background statistics; otherwise, background statistics will be computed from `X`. If the `window` keyword is given, `X` must be a 3-dimensional array and background statistics will be computed for each point in the image using a local window defined by the keyword. `background` (`GaussianStats`): The Gaussian statistics for the background (e.g., the result of calling :func:`calc_stats`). If no background stats are provided, they will be estimated based on data passed to the detector. `window` (2-tuple of odd integers): Must have the form (`inner`, `outer`), where the two values specify the widths (in pixels) of inner and outer windows centered about the pixel being evaulated. Both values must be odd integers. The background mean and covariance will be estimated from pixels in the outer window, excluding pixels within the inner window. For example, if (`inner`, `outer`) = (5, 21), then the number of pixels used to estimate background statistics will be :math:`21^2 - 5^2 = 416`. The window are modified near image borders, where full, centered windows cannot be created. The outer window will be shifted, as needed, to ensure that the outer window still has height and width `outer` (in this situation, the pixel being evaluated will not be at the center of the outer window). The inner window will be clipped, as needed, near image borders. For example, assume an image with 145 rows and columns. If the window used is (5, 21), then for the image pixel at (0, 0) (upper left corner), the the inner window will cover `image[:3, :3]` and the outer window will cover `image[:21, :21]`. For the pixel at (50, 1), the inner window will cover `image[48:53, :4]` and the outer window will cover `image[40:51, :21]`. `cov` (ndarray): An optional covariance to use. If this parameter is given, `cov` will be used for all RX calculations (background covariance will not be recomputed in each window) and only the background mean will be recomputed in each window. Returns numpy.ndarray: The return value will be the RX detector score (squared Mahalanobis distance) for each pixel given. If `X` has shape (R, C, B), the returned ndarray will have shape (R, C).. References: Reed, I.S. and Yu, X., "Adaptive multiple-band CFAR detection of an optical pattern with unknown spectral distribution," IEEE Trans. Acoust., Speech, Signal Processing, vol. 38, pp. 1760-1770, Oct. 1990. 
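    Example:

        A minimal usage sketch; `img` is assumed here to be an (R, C, B)
        ndarray of image data that has already been loaded:

    >>> scores = rx(img)                      # global background statistics
    >>> scores = rx(img, window=(5, 21))      # local (windowed) background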
''' if background is not None and window is not None: raise ValueError('`background` and `window` keywords are mutually ' \ 'exclusive.') if window is not None: rx = RX() def rx_wrapper(bg, x): rx.set_background(bg) return rx(x) return map_outer_window_stats(rx_wrapper, X, window[0], window[1], dim_out=1, cov=cov) else: return RX(background)(X) class ACE(): r'''Adaptive Coherence/Cosine Estimator (ACE). ''' def __init__(self, target, background=None, **kwargs): '''Creates the callable detector for target and background. Arguments: `target` (ndarray or sequence of ndarray): Can be either: A length-B ndarray. In this case, `target` specifies a single target spectrum to be detected. The return value will be an ndarray with shape (R, C). An ndarray with shape (D, B). In this case, `target` contains `D` length-B targets that define a subspace for the detector. The return value will be an ndarray with shape (R, C). `background` (`GaussianStats`): The Gaussian statistics for the background (e.g., the result of calling :func:`calc_stats`). If no background stats are provided, they will be estimated based on data passed to the detector. Keyword Arguments: `vectorize` (bool, default True): Specifies whether the __call__ method should attempt to vectorize operations. This typicall results in faster computation but will consume more memory. ''' for k in kwargs: if k not in ('vectorize'): raise ValueError('Invalid keyword: {0}'.format(k)) self.vectorize = kwargs.get('vectorize', True) self._target = None self._background = None self.set_target(target) if background is not None: self.set_background(background) else: self._background = None def set_target(self, target): '''Specifies target or target subspace used by the detector. Arguments: `target` (ndarray or sequence of ndarray): Can be either: A length-B ndarray. In this case, `target` specifies a single target spectrum to be detected. The return value will be an ndarray with shape (R, C). An ndarray with shape (D, B). In this case, `target` contains `D` length-B targets that define a subspace for the detector. The return value will be an ndarray with shape (R, C). ''' if target is None: self._target = None else: self._target = np.array(target, ndmin=2) self._update_constants() def set_background(self, stats): '''Sets background statistics to be used when applying the detector. Arguments: `stats` (`GaussianStats`): The Gaussian statistics for the background (e.g., the result of calling :func:`calc_stats`). If no background stats are provided, they will be estimated based on data passed to the detector. ''' self._background = stats self._update_constants() def _update_constants(self): '''Computes and caches constants used when applying the detector.''' if self._background is not None and self._target is not None: if self._background.mean is not None: target = (self._target - self._background.mean).T else: target = self._target.T self._S = self._background.sqrt_inv_cov.dot(target) self._P = self._S.dot(np.linalg.pinv(self._S)) else: self._C = None self._P = None def __call__(self, X): '''Compute ACE detector scores for X. Arguments: `X` (numpy.ndarray): For an image with shape (R, C, B), `X` can be a vector of length B (single pixel) or an ndarray of shape (R, C, B) or (R * C, B). Returns numpy.ndarray or float: The return value will be the RX detector score (squared Mahalanobis distance) for each pixel given. 
If `X` is a single pixel, a float will be returned; otherwise, the return value will be an ndarray of floats with one less dimension than the input. ''' if not isinstance(X, np.ndarray): raise TypeError('Expected a numpy.ndarray.') shape = X.shape if X.ndim == 1: # Compute ACE score for single pixel if self._background.mean is not None: X = X - self._background.mean z = self._background.sqrt_inv_cov.dot(X) return z.dot(self._P).dot(z) / (z.dot(z)) if self._background is None: self.set_background(calc_stats(X)) if self.vectorize: # Compute all scores at once if self._background.mean is not None: X = X - self._background.mean if X.ndim == 3: X = X.reshape((-1, X.shape[-1])) z = self._background.sqrt_inv_cov.dot(X.T).T zP = np.dot(z, self._P) zPz = np.einsum('ij,ij->i', zP, z) zz = np.einsum('ij,ij->i', z, z) return (zPz / zz).reshape(shape[:-1]) else: # Call recursively for each pixel return np.apply_along_axis(self, -1, X) def ace(X, target, background=None, window=None, cov=None, **kwargs): r'''Returns Adaptive Coherence/Cosine Estimator (ACE) detection scores. Usage: y = ace(X, target, background) y = ace(X, target, window= [, cov=]) Arguments: `X` (numpy.ndarray): For the first calling method shown, `X` can be an ndarray with shape (R, C, B) or an ndarray of shape (R * C, B). If the `background` keyword is given, it will be used for the image background statistics; otherwise, background statistics will be computed from `X`. If the `window` keyword is given, `X` must be a 3-dimensional array and background statistics will be computed for each point in the image using a local window defined by the keyword. `target` (ndarray or sequence of ndarray): If `X` has shape (R, C, B), `target` can be any of the following: A length-B ndarray. In this case, `target` specifies a single target spectrum to be detected. The return value will be an ndarray with shape (R, C). An ndarray with shape (D, B). In this case, `target` contains `D` length-B targets that define a subspace for the detector. The return value will be an ndarray with shape (R, C). A length-D sequence (e.g., list or tuple) of length-B ndarrays. In this case, the detector will be applied seperately to each of the `D` targets. This is equivalent to calling the function sequentially for each target and stacking the results but is much faster. The return value will be an ndarray with shape (R, C, D). `background` (`GaussianStats`): The Gaussian statistics for the background (e.g., the result of calling :func:`calc_stats` for an image). This argument is not required if `window` is given. `window` (2-tuple of odd integers): Must have the form (`inner`, `outer`), where the two values specify the widths (in pixels) of inner and outer windows centered about the pixel being evaulated. Both values must be odd integers. The background mean and covariance will be estimated from pixels in the outer window, excluding pixels within the inner window. For example, if (`inner`, `outer`) = (5, 21), then the number of pixels used to estimate background statistics will be :math:`21^2 - 5^2 = 416`. If this argument is given, `background` is not required (and will be ignored, if given). The window is modified near image borders, where full, centered windows cannot be created. The outer window will be shifted, as needed, to ensure that the outer window still has height and width `outer` (in this situation, the pixel being evaluated will not be at the center of the outer window). The inner window will be clipped, as needed, near image borders. 
For example, assume an image with 145 rows and columns. If the window used is (5, 21), then for the image pixel at (0, 0) (upper left corner), the the inner window will cover `image[:3, :3]` and the outer window will cover `image[:21, :21]`. For the pixel at (50, 1), the inner window will cover `image[48:53, :4]` and the outer window will cover `image[40:51, :21]`. `cov` (ndarray): An optional covariance to use. If this parameter is given, `cov` will be used for all matched filter calculations (background covariance will not be recomputed in each window) and only the background mean will be recomputed in each window. If the `window` argument is specified, providing `cov` will allow the result to be computed *much* faster. Keyword Arguments: `vectorize` (bool, default True): Specifies whether the function should attempt to vectorize operations. This typicall results in faster computation but will consume more memory. Returns numpy.ndarray: The return value will be the ACE scores for each input pixel. The shape of the returned array will be either (R, C) or (R, C, D), depending on the value of the `target` argument. References: Kraut S. & Scharf L.L., "The CFAR Adaptive Subspace Detector is a Scale- Invariant GLRT," IEEE Trans. Signal Processing., vol. 47 no. 9, pp. 2538-41, Sep. 1999 ''' if background is not None and window is not None: raise ValueError('`background` and `window` keywords are mutually ' \ 'exclusive.') detector = ACE(target, background, **kwargs) if window is None: # Use common background statistics for all pixels if isinstance(target, np.ndarray): # Single detector score for target subspace for each pixel result = detector(X) else: # Separate score arrays for each target in target list if background is None: detector.set_background(calc_stats(X)) def apply_to_target(t): detector.set_target(t) return detector(X) result = np.array([apply_to_target(t) for t in target]) if result.ndim == 3: result = result.transpose(1, 2, 0) else: # Compute local background statistics for each pixel if isinstance(target, np.ndarray): # Single detector score for target subspace for each pixel def ace_wrapper(bg, x): detector.set_background(bg) return detector(x) result = map_outer_window_stats(ace_wrapper, X, window[0], window[1], dim_out=1, cov=cov) else: # Separate score arrays for each target in target list def apply_to_target(t, x): detector.set_target(t) return detector(x) def ace_wrapper(bg, x): detector.set_background(bg) return [apply_to_target(t, x) for t in target] result = map_outer_window_stats(ace_wrapper, X, window[0], window[1], dim_out=len(target), cov=cov) if result.ndim == 3: result = result.transpose(1, 2, 0) # Convert NaN values to zero result = np.nan_to_num(result) if isinstance(result, np.ndarray): return np.clip(result, 0, 1, out=result) else: return np.clip(result, 0, 1) spectral-0.22.4/spectral/algorithms/perceptron.py000066400000000000000000000366431412674721200221570ustar00rootroot00000000000000''' Classes and functions for classification with neural networks. ''' from __future__ import absolute_import, division, print_function, unicode_literals import itertools import math import numpy as np import os import sys class PerceptronLayer: '''A multilayer perceptron layer with sigmoid activation function.''' def __init__(self, shape, k=1.0, weights=None): ''' Arguments: `shape` (2-tuple of int): Should have the form (`num_inputs`, `num_neurons`), where `num_inputs` does not include an input for the bias weights. `k` (float): Sigmoid shape parameter. 
`weights` (ndarray): Initial weights for the layer. Note that if provided, this argument must have shape (`num_neurons`, `num_inputs` + 1). If not provided, initial weights will be randomized. ''' self.k = k self.shape = (shape[1], shape[0] + 1) if weights: if weights.shape != self.shape: raise Exception('Shape of weight matrix does not ' \ 'match Perceptron layer shape.') self.weights = np.array(weights, dtype=np.float64) else: self.randomize_weights() self.dW = np.zeros_like(self.weights) self.dW_buf = np.zeros_like(self.dW) self.x = np.ones(self.shape[1], float) def randomize_weights(self): '''Randomizes the layer weight matrix. The bias weight will be in the range [0, 1). The remaining weights will correspond to a vector with unit length and uniform random orienation. ''' self.weights = 1. - 2. * np.random.rand(*self.shape) for row in self.weights: row[1:] /= math.sqrt(np.sum(row[1:]**2)) row[0] = -0.5 * np.random.rand() - 0.5 * np.sum(row[1:]) def input(self, x, clip=0.0): '''Sets layer input and computes output. Arguments: `x` (sequence): Layer input, not including bias input. `clip` (float >= 0): Optional clipping value to limit sigmoid output. The sigmoid function has output in the range (0, 1). If the `clip` argument is set to `a` then all neuron outputs for the layer will be constrained to the range [a, 1 - a]. This can improve perceptron learning rate in some situations. Return value: The ndarray of output values is returned and is also set in the `y` attribute of the layer. For classifying samples, call `classify` instead. ''' self.x[1:] = x self.z = np.dot(self.weights, self.x) if clip > 0.: self.y = np.clip(self.g(self.z), clip, 1. - clip) else: self.y = self.g(self.z) return self.y def g(self, a): '''Neuron activation function (logistic sigmoid)''' return 1. / (1. + np.exp(- self.k * a)) def dy_da(self): '''Derivative of activation function at current activation level.''' return self.k * (self.y * (1.0 - self.y)) class Perceptron: ''' A Multi-Layer Perceptron network with backpropagation learning.''' def __init__(self, layers, k=1.0): ''' Creates the Perceptron network. Arguments: layers (sequence of integers): A list specifying the network structure. `layers`[0] is the number of inputs. `layers`[-1] is the number of perceptron outputs. `layers`[1: -1] are the numbers of units in the hidden layers. `k` (float): Sigmoid shape parameter. ''' if type(layers) != list or len(layers) < 2: raise Exception('ERROR: Perceptron argument must be list of 2 or ' 'more integers.') self.shape = layers[:] self.layers = [PerceptronLayer((layers[i - 1], layers[i]), k) for i in range(1, len(layers))] self.accuracy = 0 self.error = 0 # To prevent overflow when scaling inputs self.min_input_diff = 1.e-8 # If True, previous iteration weights are preserved after interrupting # training (with CTRL-C) self.cache_weights = True def input(self, x, clip=0.0): '''Sets Perceptron input, activates neurons and sets & returns output. Arguments: `x` (sequence): Inputs to input layer. Should not include a bias input. `clip` (float >= 0): Optional clipping value to limit sigmoid output. The sigmoid function has output in the range (0, 1). If the `clip` argument is set to `a` then all neuron outputs for the layer will be constrained to the range [a, 1 - a]. This can improve perceptron learning rate in some situations. For classifying samples, call `classify` instead of `input`. 
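        Example (illustrative; assumes `p` is a `Perceptron` that has already
        been trained, since training establishes the input scaling):

        >>> y = p.input([0.2, 0.7])          # output-layer activations
        >>> labels = p.classify([0.2, 0.7])  # same pass, rounded to 0/1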
''' self.x = x[:] x = self._scale * (x - self._offset) for layer in self.layers: x = layer.input(x, clip) self.y = np.array(x) return x def classify(self, x): '''Classifies the given sample. This has the same result as calling input and rounding the result. ''' return [int(round(xx)) for xx in self.input(x)] def train(self, X, Y, max_iterations=10000, accuracy=100.0, rate=0.3, momentum=0., batch=1, clip=0.0, on_iteration=None, stdout=sys.stdout): ''' Trains the Perceptron to classify the given samples. Arguments: `X`: The sequence of observations to be learned. Each element of `X` must have a length corresponding to the input layer of the network. Values in `X` are not required to be scaled. `Y`: Truth values corresponding to elements of `X`. `Y` must contain as many elements as `X` and each element of `Y` must contain a number of elements corresponding to the output layer of the network. All values in `Y` should be in the range [0, 1] and for training a classifier, values in `Y` are typically *only* 0 or 1 (i.e., no intermediate values). `max_iterations` (int): Maximum number of iterations through the data to perform. Training will end sooner if the specified accuracy is reached in fewer iterations. `accuracy` (float): The percent training accuracy at which to terminate training, if the maximum number of iterations are not reached first. This value can be set greater than 100 to force a specified number of training iterations to be performed (e.g., to continue reducing the error term after 100% classification accuracy has been achieved. `rate` (float): The perceptron learning rate (typically in the range (0, 1]). `momentum` (float): The perceptron learning momentum term, which specifies the fraction of the previous update value that should be added to the current update term. The value should be in the range [0, 1). `batch` (positive integer): Specifies how many samples should be evaluated before an update is made to the perceptron weights. A value of 0 indicates batch updates should be performed (evaluate all training inputs prior to updating). Otherwise, updates will be aggregated for every `batch` inputs (i.e., `batch` == 1 is stochastic learning). `clip` (float >= 0): Optional clipping value to limit sigmoid output during training. The sigmoid function has output in the range (0, 1). If the `clip` argument is set to `a` then all neuron outputs for the layer will be constrained to the range [a, 1 - a]. This can improve perceptron learning rate in some situations. After training the perceptron with a clipping value, `train` can be called again with clipping set to 0 to continue reducing the training error. `on_iteration` (callable): A callable object that accepts the perceptron as input and returns bool. If this argument is set, the object will be called at the end of each training iteration with the perceptron as its argument. If the callable returns True, training will terminate. `stdout`: An object with a `write` method that can be set to redirect training status messages somewhere other than stdout. To suppress output, set `stats` to None. 
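        Example (a minimal sketch using the XOR truth table defined at the
        bottom of this module; convergence of such a small network depends on
        the randomized initial weights):

        >>> (X, Y) = list(zip(*xor_data))
        >>> p = Perceptron([2, 2, 1])
        >>> converged = p.train(X, Y, 5000, rate=0.3, momentum=0.6)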
''' if stdout is None: stdout = open(os.devnull, 'w') try: self._set_scaling(X) for layer in self.layers: layer.dW_old = np.zeros_like(layer.dW) for iteration in range(max_iterations): self._reset_corrections() self.error = 0 num_samples = 0 num_correct = 0 num_summed = 0 for (x, t) in zip(X, Y): num_samples += 1 num_summed += 1 num_correct += np.all(np.round(self.input(x, clip)) == t) delta = np.array(t) - self.y self.error += 0.5 * sum(delta**2) # Determine incremental weight adjustments self._update_dWs(t) if batch > 0 and num_summed == batch: self._adjust_weights(rate, momentum, num_summed, stdout) num_summed = 0 # In case a partial batch is remaining if batch > 0 and num_summed > 0: self._adjust_weights(rate, momentum, num_summed, stdout) num_summed = 0 self.accuracy = 100. * num_correct / num_samples if on_iteration and on_iteration(self): return True stdout.write('Iter % 5d: Accuracy = %.2f%% E = %f\n' % (iteration, self.accuracy, self.error)) if self.accuracy >= accuracy: stdout.write('Network trained to %.1f%% sample accuracy ' 'in %d iterations.\n' % (self.accuracy, iteration + 1)) return True # If doing full batch learning (batch == 0) if num_summed > 0: self._adjust_weights(rate, momentum, num_summed, stdout) num_summed = 0 except KeyboardInterrupt: stdout.write("KeyboardInterrupt: Terminating training.\n") self._reset_corrections() return False stdout.write('Terminating network training after %d iterations.\n' % (iteration + 1)) return False def _update_dWs(self, t): '''Update weight adjustment values for the current sample.''' # Output layer: # dE/dy = t - y # dz/dW = x layerK = self.layers[-1] layerK.delta = layerK.dy_da() * (t - self.y) layerK.dW += np.outer(layerK.delta, layerK.x) # Hidden layers for i in range(len(self.layers) - 2, -1, -1): (layerJ, layerK) = self.layers[i: i + 2] b = np.dot(layerK.delta, layerK.weights[:, 1:]) layerJ.delta = layerJ.dy_da() * b layerJ.dW += np.outer(layerJ.delta, layerJ.x) def _adjust_weights(self, rate, momentum, num_summed, stdout): '''Applies aggregated weight adjustments to the perceptron weights.''' if self.cache_weights: weights = [np.array(layer.weights) for layer in self.layers] try: if momentum > 0: for layer in self.layers: layer.dW *= (float(rate) / num_summed) layer.dW += momentum * layer.dW_old layer.weights += layer.dW (layer.dW_old, layer.dW) = (layer.dW, layer.dW_old) else: for layer in self.layers: layer.dW *= (float(rate) / num_summed) layer.weights += layer.dW except KeyboardInterrupt: if self.cache_weights: stdout.write('Interrupt during weight adjustment. Restoring ' \ 'previous weights.\n') for i in range(len(weights)): self.layers[i].weights = weights[i] else: stdout.write('Interrupt during weight adjustment. Weight ' \ 'cacheing was disabled so current weights may' \ 'be corrupt.\n') raise finally: self._reset_corrections() def _reset_corrections(self): for layer in self.layers: layer.dW.fill(0) def _set_scaling(self, X): '''Sets translation/scaling of inputs to map X to the range [0, 1].''' mins = maxes = None for x in X: if mins is None: mins = x maxes = x else: mins = np.min([mins, x], axis=0) maxes = np.max([maxes, x], axis = 0) self._offset = mins r = maxes - mins self._scale = 1. 
/ np.where(r < self.min_input_diff, 1, r) # Sample data xor_data = [ [[0, 0], [0]], [[0, 1], [1]], [[1, 0], [1]], [[1, 1], [0]], ] xor_data2 = [ [[0, 0], [0, 1]], [[0, 1], [1, 0]], [[1, 0], [1, 0]], [[1, 1], [0, 1]], ] and_data = [ [[0, 0], [0]], [[0, 1], [0]], [[1, 0], [0]], [[1, 1], [1]], ] def test_case(XY, shape, *args, **kwargs): (X, Y) = list(zip(*XY)) p = Perceptron(shape) trained = p.train(X, Y, *args, **kwargs) return (trained, p) def test_xor(*args, **kwargs): XY = xor_data shape = [2, 2, 1] return test_case(XY, shape, *args, **kwargs) def test_xor222(*args, **kwargs): XY = xor_data2 shape = [2, 2, 2] return test_case(XY, shape, *args, **kwargs) def test_xor231(*args, **kwargs): XY = xor_data shape = [2, 3, 1] return test_case(XY, shape, *args, **kwargs) def test_and(*args, **kwargs): XY = and_data shape = [2, 1] return test_case(XY, shape, *args, **kwargs) if __name__ == '__main__': tests = [('AND (2x1)', test_and), ('XOR (2x2x1)', test_xor), ('XOR (2x2x2)', test_xor222), ('XOR (2x3x1)', test_xor231)] results = [test[1](5000)[0] for test in tests] nr = [(p[0][0], p[1]) for p in zip(tests, results)] print() print('Training results for 5000 iterations') print('------------------------------------') for (name, result) in nr: s = [ 'FAILED', 'PASSED'][result] print('{0:<20}: {1}'.format(name, s)) if False in results: print('\nNote: XOR convergence for these small network sizes is') print('dependent on initial weights, which are randomized. Try') print('running the test again.') spectral-0.22.4/spectral/algorithms/resampling.py000066400000000000000000000171771412674721200221400ustar00rootroot00000000000000''' Functions for resampling a spectrum from one band discretization to another. ''' from __future__ import absolute_import, division, print_function, unicode_literals import logging import math import numpy as np from ..spectral import BandInfo def erf_local(x): # save the sign of x sign = 1 if x >= 0 else -1 x = abs(x) # constants a1 = 0.254829592 a2 = -0.284496736 a3 = 1.421413741 a4 = -1.453152027 a5 = 1.061405429 p = 0.3275911 # A&S formula 7.1.26 t = 1.0/(1.0 + p*x) y = 1.0 - (((((a5*t + a4)*t) + a3)*t + a2)*t + a1)*t*math.exp(-x*x) return sign*y # erf(-x) = -erf(x) try: from math import erf except: try: from scipy.special import erf except: erf = erf_local def erfc(z): '''Complement of the error function.''' return 1.0 - erf(z) def normal_cdf(x): '''CDF of the normal distribution.''' sqrt2 = 1.4142135623730951 return 0.5 * erfc(-x / sqrt2) def normal_integral(a, b): '''Integral of the normal distribution from a to b.''' return normal_cdf(b) - normal_cdf(a) def ranges_overlap(R1, R2): '''Returns True if there is overlap between ranges of pairs R1 and R2.''' if (R1[0] < R2[0] and R1[1] < R2[0]) or \ (R1[0] > R2[1] and R1[1] > R2[1]): return False return True def overlap(R1, R2): '''Returns (min, max) of overlap between the ranges of pairs R1 and R2.''' return (max(R1[0], R2[0]), min(R1[1], R2[1])) def normal(mean, stdev, x): sqrt_2pi = 2.5066282746310002 return math.exp(-((x - mean) / stdev)**2 / 2.0) / (sqrt_2pi * stdev) def build_fwhm(centers): '''Returns FWHM list, assuming FWHM is midway between adjacent bands. ''' fwhm = [0] * len(centers) fwhm[0] = centers[1] - centers[0] fwhm[-1] = centers[-1] - centers[-2] for i in range(1, len(centers) - 1): fwhm[i] = (centers[i + 1] - centers[i - 1]) / 2.0 return fwhm def create_resampling_matrix(centers1, fwhm1, centers2, fwhm2): ''' Returns a resampling matrix to convert spectra from one band discretization to another. 
Arguments are the band centers and full-width half maximum spectral response for the original and new band discretizations. ''' logger = logging.getLogger('spectral') sqrt_8log2 = 2.3548200450309493 N1 = len(centers1) N2 = len(centers2) bounds1 = [[centers1[i] - fwhm1[i] / 2.0, centers1[i] + fwhm1[i] / 2.0] for i in range(N1)] bounds2 = [[centers2[i] - fwhm2[i] / 2.0, centers2[i] + fwhm2[i] / 2.0] for i in range(N2)] M = np.zeros([N2, N1]) jStart = 0 nan = float('nan') for i in range(N2): stdev = fwhm2[i] / sqrt_8log2 j = jStart # Find the first original band that overlaps the new band while j < N1 and bounds1[j][1] < bounds2[i][0]: j += 1 if j == N1: logger.info(('No overlap for target band %d (%f / %f)' % ( i, centers2[i], fwhm2[i]))) M[i, 0] = nan continue matches = [] # Get indices for all original bands that overlap the new band while j < N1 and bounds1[j][0] < bounds2[i][1]: if ranges_overlap(bounds1[j], bounds2[i]): matches.append(j) j += 1 # Put NaN in first element of any row that doesn't produce a band in # the new schema. if len(matches) == 0: logger.info('No overlap for target band %d (%f / %f)', i, centers2[i], fwhm2[i]) M[i, 0] = nan continue # Determine the weights for the original bands that overlap the new # band. There may be multiple bands that overlap or even just a single # band that only partially overlaps. Weights are normoalized so either # case can be handled. overlaps = [overlap(bounds1[k], bounds2[i]) for k in matches] contribs = np.zeros(len(matches)) A = 0. for k in range(len(matches)): #endNorms = [normal(centers2[i], stdev, x) for x in overlaps[k]] #dA = (overlaps[k][1] - overlaps[k][0]) * sum(endNorms) / 2.0 (a, b) = [(x - centers2[i]) / stdev for x in overlaps[k]] dA = normal_integral(a, b) contribs[k] = dA A += dA contribs = contribs / A for k in range(len(matches)): M[i, matches[k]] = contribs[k] return M class BandResampler: '''A callable object for resampling spectra between band discretizations. A source band will contribute to any destination band where there is overlap between the FWHM of the two bands. If there is an overlap, an integral is performed over the region of overlap assuming the source band data value is constant over its FWHM (since we do not know the true spectral load over the source band) and the destination band has a Gaussian response function. Any target bands that do not have any overlapping source bands will contain NaN as the resampled band value. If bandwidths are not specified for source or destination bands, the bands are assumed to have FWHM values that span half the distance to the adjacent bands. ''' def __init__(self, centers1, centers2, fwhm1=None, fwhm2=None): '''BandResampler constructor. Usage: resampler = BandResampler(bandInfo1, bandInfo2) resampler = BandResampler(centers1, centers2, [fwhm1 = None [, fwhm2 = None]]) Arguments: `bandInfo1` (:class:`~spectral.BandInfo`): Discretization of the source bands. `bandInfo2` (:class:`~spectral.BandInfo`): Discretization of the destination bands. `centers1` (list): floats defining center values of source bands. `centers2` (list): floats defining center values of destination bands. `fwhm1` (list): Optional list defining FWHM values of source bands. `fwhm2` (list): Optional list defining FWHM values of destination bands. Returns: A callable BandResampler object that takes a spectrum corresponding to the source bands and returns the spectrum resampled to the destination bands. 
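        Example (an illustrative sketch; the band centers below are arbitrary
        wavelengths in nanometers, chosen only for the illustration):

        >>> import numpy as np
        >>> centers1 = [400., 410., 420., 430., 440.]
        >>> centers2 = [405., 425.]
        >>> resample = BandResampler(centers1, centers2)
        >>> y = resample(np.array([0.10, 0.12, 0.15, 0.16, 0.14]))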
If bandwidths are not specified, the associated bands are assumed to have FWHM values that span half the distance to the adjacent bands. ''' if isinstance(centers1, BandInfo): fwhm1 = centers1.bandwidths centers1 = centers1.centers if isinstance(centers2, BandInfo): fwhm2 = centers2.bandwidths centers2 = centers2.centers if fwhm1 is None: fwhm1 = build_fwhm(centers1) if fwhm2 is None: fwhm2 = build_fwhm(centers2) self.matrix = create_resampling_matrix( centers1, fwhm1, centers2, fwhm2) def __call__(self, spectrum): '''Takes a source spectrum as input and returns a resampled spectrum. Arguments: `spectrum` (list or :class:`numpy.ndarray`): list or vector of values to be resampled. Must have same length as the source band discretiation used to created the resampler. Returns: A resampled rank-1 :class:`numpy.ndarray` with length corresponding to the destination band discretization used to create the resampler. Any target bands that do not have at lease one overlapping source band will contain `float('nan')` as the resampled band value.''' return np.dot(self.matrix, spectrum) spectral-0.22.4/spectral/algorithms/spatial.py000066400000000000000000000647511412674721200214340ustar00rootroot00000000000000''' Functions over spatial regions of images. ''' from __future__ import absolute_import, division, print_function, unicode_literals __all__ = ['map_window', 'map_outer_window_stats', 'map_class_ids', 'map_classes'] import itertools import numpy as np import spectral as spy from .algorithms import GaussianStats, iterator_ij def get_window_bounds(nrows, ncols, height, width, i, j): '''Returns boundaries of an image window centered on a specified pixel. Arguments: `nrows` (int): Total number of rows in the image `ncols` (int): Total number of columns in the image `height` (int): Height of the desired window (in pixels) `width` (int): Width of the desired window (in pixels) `i` (int): Row index of the pixel `j` (int): Column index of the pixel Return value: A 4-tuple of ints of the form (row_start, row_stop, col_start, col_stop). The dimensions of the window will always be (`height`, `width`). For pixels near the border of the image where there are insufficient pixels between the specified pixel and image border, the window will be flush against the border of the image and the pixel position will be offset from the center of the widow. For an alternate function that clips window pixels near the border of the image, see `get_window_bounds_clipped`. ''' if height > nrows or width > ncols: raise ValueError('Window size is too large for image dimensions.') rmin = i - height // 2 rmax = rmin + height if rmin < 0: rmax = height rmin = 0 elif rmax > nrows: rmax = nrows rmin = nrows - height cmin = j - width // 2 cmax = cmin + width if cmin < 0: cmax = width cmin = 0 elif cmax > ncols: cmax = ncols cmin = ncols - width return (rmin, rmax, cmin, cmax) def get_window_bounds_clipped(nrows, ncols, height, width, i, j): '''Returns boundaries of an image window centered on a specified pixel. Arguments: `nrows` (int): Total number of rows in the image `ncols` (int): Total number of columns in the image `height` (int): Height of the desired window (in pixels) `width` (int): Width of the desired window (in pixels) `i` (int): Row index of the pixel `j` (int): Column index of the pixel Return value: A 4-tuple of ints of the form (row_start, row_stop, col_start, col_stop). Near the boder of the image where there are insufficient pixels between the specified pixel and the image border, the window will be clipped. 
For an alternate function that always returns a window with dimensions (`width`, `height`), see `get_window_bounds`. ''' if height > nrows or width > ncols: raise ValueError('Window size is too large for image dimensions.') rmin = i - height // 2 rmax = rmin + height if rmin < 0: rmin = 0 elif rmax > nrows: rmax = nrows cmin = j - width // 2 cmax = cmin + width if cmin < 0: cmin = 0 elif cmax > ncols: cmax = ncols return (rmin, rmax, cmin, cmax) def map_window(func, image, window, rslice=(None,), cslice=(None,), border='shift', dtype=None): '''Applies a function over a rolling spatial window. Arguments: `func` (callable): The function to apply. This function must accept two inputs: `X` (ndarray): The image data corresponding to the spatial window for the current pixel being evaluated. `X` will have shape `window + (N,)`, where `N` is the number of bands in the image. For pixels near the border of the image, the first two dimensions of `X` may be smaller if `border` is set to "clip". `ij` (2-tuple of integers): Indicates the row/column of the current pixel within the window. For `window` with even dimensions or for pixels near the image border, this may not correspond to the center pixel in the window. `image` (`SpyFile` or np.ndarray): The image on which the apply `func` with the specified window. `window` (int or 2-tuple of ints): The size of the window, in pixels. If this value is an integer, the height and width of the window will both be set to the value. Otherwise, `window` should be a tuple of the form (height, width). `rslice` (tuple): Tuple of `slice` parameters specifying at which rows the function should be applied. If not provided, `func` is applied to all rows. `cslice` (tuple): Tuple of `slice` parameters specifying at which columns the function should be applied. If not provided, `func` is applied to all columns. `border` (string, default "shift"): Indicates how to handles windows near the edge of the window. If the value is "shift", the window dimensions will alway be `(width, height)` but near the image border the pixel being iterated will be offset from the center of the window. If set to "clip", window regions falling outside the image border will be clipped and the window dimension will be reduced. `dtype` (np.dtype): Optional dtype for the output. Return value: Returns an np.ndarray with shape corresponding to the row and column start/stop indices and shape of `func` output. 
Examples: --------- To produce a new image that is a 3x3 pixel average of the input image: >>> f = lambda X, ij: np.mean(X.reshape((-1, X.shape[-1])), axis=0) >>> image_3x3 = map_window(f, image, 3) Perform a 5x5 pixel average but only retain values at every fifth row and column (i.e., simulate an image at one fifth resolution): >>> image.shape (145, 145, 220) >>> image_5x5 = map_window(f, image, 5, (2, -2, 5), (2, -2, 5)) >>> image_5x5.shape (29, 29, 220) ''' if isinstance(window, (list, tuple)): (height, width) = window[:] else: (height, width) = (window, window) if border == 'shift': get_window = get_window_bounds elif border == 'clip': get_window = get_window_bounds_clipped else: raise ValueError('Unrecognized border option.') (nrows, ncols) = image.shape[:2] # Row/Col indices at which to apply the windowed function rvals = list(range(*slice(*rslice).indices(nrows))) cvals = list(range(*slice(*cslice).indices(ncols))) def get_val(i, j): (r0, r1, c0, c1) = get_window(nrows, ncols, height, width, i, j) return func(image[r0:r1, c0:c1], (i - r0, j - c0)).astype(dtype) return np.array([[get_val(r, c) for c in cvals] for r in rvals]).astype(dtype) def map_outer_window_stats(func, image, inner, outer, dim_out=1, cov=None, dtype=None, rslice=(None,), cslice=(None,)): '''Maps a function accepting `GaussianStats` over a rolling spatial window. Arguments: `func` (callable): A callable object that will be applied to each pixel when the __call__ method is called for this object. The __call__ method of `func` must accept two arguments: - `X` (`GaussianStats`): The Gaussian statistics computed from pixels in the outer window (excluding the inner window). - `v` (ndarray): An ndarray representing the pixel for which the window was produced. `image` (`SpyFile` or np.ndarray): The image on which the apply `func` with the specified window. `inner` (int or 2-tuple of ints): The size of the inner window, in pixels. If this value is an integer, the height and width of the window will both be set to the given value. Otherwise, `inner` should be a tuple of the form (height, width). All pixels within the inner window are excluded from statistics computed for the associated pixel. `outer` (int or 2-tuple of ints): The size of the outer window, in pixels. If this value is an integer, the height and width of the window will both be set to the given value. Otherwise, `outer` should be a tuple of the form (height, width). All pixels in the outer window (but not in the inner window) are used to compute statistics for the associated pixel. `rslice` (tuple): Tuple of `slice` parameters specifying at which rows the function should be applied. If not provided, `func` is applied to all rows. `cslice` (tuple): Tuple of `slice` parameters specifying at which columns the function should be applied. If not provided, `func` is applied to all columns. `dtype` (np.dtype): Optional dtype for the output. Return value: Returns an np.ndarray whose elements are the result of mapping `func` to the pixels and associated window stats. Examples: --------- To create an RX anomaly detector with a 3x3 pixel inner window and 17x17 outer window (note that `spectral.rx` already does this): >>> def mahalanobis(bg, x): ... return (x - bg.mean).dot(bg.inv_cov).dot(x - bg.mean) ... 
>>> rx_scores = map_outer_window_stats(mahalanobis, image, 3, 17) ''' mapper = WindowedGaussianBackgroundMapper(inner, outer, func, cov, dim_out, dtype) return mapper(image, rslice, cslice) class WindowedGaussianBackgroundMapper(object): '''A class for procucing window statistics with an inner exclusion window. ''' def __init__(self, inner, outer, function=None, cov=None, dim_out=None, dtype=None): '''Creates a detector with the given inner/outer window. Arguments: `inner` (integer or 2-tuple of integers): Width and heigth of inner window, in pixels. `outer` (integer or 2-tuple of integers): Width and heigth of outer window, in pixels. Dimensions must be greater than inner window `function` (callable object): A callable object that will be applied to each pixel when the __call__ method is called for this object. The __call__ method of `function` must accept two arguments: - A `GaussianStats` object. - An ndarray representing the pixel for which the were computed. `cov` (ndarray): An optional covariance to use. If this parameter is given, `cov` will be used for all RX calculations (background covariance will not be recomputed in each window). Only the background mean will be recomputed in each window). `dim_out` (int): The dimensionality of the output of `function` when called on a pixel spectrum. If this value is not specified, `function` will be checked to see if it has a `dim_out` member. If it does not, `dim_out` will be assumed to be 1. `dtype`: Optional dtype for the output array. If not specified, np.float32 is used. ''' if isinstance(inner, (list, tuple)): self.inner = inner[:] else: self.inner = (inner, inner) if isinstance(outer, (list, tuple)): self.outer = outer[:] else: self.outer = (outer, outer) self.callable = function self.cov = cov self.dim_out = dim_out self.create_mask = None if dtype is not None: self.dtype = dtype else: self.dtype = np.float32 def __call__(self, image, rslice=(None,), cslice=(None,)): '''Applies the objects callable function to the image data. Arguments: `image` (numpy.ndarray): An image with shape (R, C, B). `rslice` (tuple): Tuple of `slice` parameters specifying at which rows the function should be applied. If not provided, `func` is applied to all rows. `cslice` (tuple): Tuple of `slice` parameters specifying at which columns the function should be applied. If not provided, `func` is applied to all columns. Returns numpy.ndarray: An array whose elements correspond to the outputs from the object's callable function. 
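        Example (illustrative sketch; `image` is an (R, C, B) ndarray and
        `mahalanobis` is a callable like the one shown in the
        `map_outer_window_stats` docstring above):

        >>> mapper = WindowedGaussianBackgroundMapper(3, 17, mahalanobis)
        >>> scores = mapper(image)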
''' (R, C, B) = image.shape (row_border, col_border) = [x // 2 for x in self.outer] if self.dim_out is not None: dim_out = self.dim_out elif hasattr(self.callable, 'dim_out') and \ self.callable.dim_out is not None: dim_out = self.callable.dim_out else: dim_out = 1 # Row/Col indices at which to apply the windowed function rvals = list(range(*slice(*rslice).indices(R))) cvals = list(range(*slice(*cslice).indices(C))) nrows_out = len(rvals) ncols_out = len(cvals) if dim_out > 1: x = np.ones((nrows_out, ncols_out, dim_out), dtype=np.float32) * -1.0 else: x = np.ones((nrows_out, ncols_out), dtype=self.dtype) * -1.0 npixels = self.outer[0] * self.outer[1] - self.inner[0] * self.inner[1] if self.cov is None and npixels < B: raise ValueError('Window size provides too few samples for ' \ 'image data dimensionality.') if self.create_mask is not None: create_mask = self.create_mask else: create_mask = inner_outer_window_mask_creator(image.shape, self.inner, self.outer) interior_mask = create_mask(R // 2, C // 2, True)[2].ravel() interior_indices = np.argwhere(interior_mask == 0).squeeze() (i_interior_start, i_interior_stop) = (row_border, R - row_border) (j_interior_start, j_interior_stop) = (col_border, C - col_border) status = spy._status status.display_percentage('Processing image: ') if self.cov is not None: # Since we already have the covariance, just use np.mean to get # means of the inner window and outer (including the inner), then # use those to calculate the mean of the outer window alone. background = GaussianStats(cov=self.cov) for i in range(nrows_out): for j in range(ncols_out): (inner, outer) = create_mask(rvals[i], cvals[j], False) N_in = (inner[1] - inner[0]) * (inner[3] - inner[2]) N_tot = (outer[1] - outer[0]) * (outer[3] - outer[2]) mean_out = np.mean(image[outer[0]: outer[1], outer[2]: outer[3]].reshape(-1, B), axis=0) mean_in = np.mean(image[outer[0]: outer[1], outer[2]: outer[3]].reshape(-1, B), axis=0) mean = mean_out * (float(N_tot) / (N_tot - N_in)) - \ mean_in * (float(N_in) / (N_tot - N_in)) background.mean = mean x[i, j] = self.callable(background, image[rvals[i], cvals[j]]) if i % (nrows_out // 10) == 0: status.update_percentage(100. * i // nrows_out) else: # Need to calculate both the mean and covariance for the outer # window (without the inner). (h, w) = self.outer[:] for i in range(nrows_out): ii = rvals[i] - h // 2 for j in range(ncols_out): jj = cvals[j] - w // 2 if i_interior_start <= rvals[i] < i_interior_stop and \ j_interior_start <= cvals[j] < j_interior_stop: X = image[ii : ii + h, jj : jj + w, :] indices = interior_indices else: (inner, (i0, i1, j0, j1), mask) = \ create_mask(rvals[i], cvals[j], True) indices = np.argwhere(mask.ravel() == 0).squeeze() X = image[i0 : i1, j0 : j1, :] X = np.take(X.reshape((-1, B)), indices, axis=0) mean = np.mean(X, axis=0) cov = np.cov(X, rowvar=False) background = GaussianStats(mean, cov) x[i, j] = self.callable(background, image[rvals[i], cvals[j]]) if i % (nrows_out // 10) == 0: status.update_percentage(100. * i / nrows_out) status.end_percentage() return x def inner_outer_window_mask_creator(image_shape, inner, outer): '''Returns a function to give inner/outer windows. Arguments: `image_shape` (tuple of integers): Specifies the dimensions of the image for which windows are to be produced. Only the first two dimensions (rows, columns) is used. `inner` (int or 2-tuple of integers): Height and width of the inner window, in pixels. `outer` (int or 2-tuple of integers): Height and width of the outer window, in pixels. 
Return value: A function that accepts the following arguments: `i` (int): Row index of pixel for which to generate the mask `j` (int): Row index of pixel for which to generate the mask `gen_mask` (bool, default False): A boolean flag indicating whether to return a boolean mask of shape (window[1], window[1]), indicating which pixels in the window should be used for background statistics calculations. If `gen_mask` is False, the return value is a 2-tuple of 4-tuples, where the 2-tuples specify the start/stop row/col indices for the inner and outer windows, respectively. Each of the 4-tuples is of the form (row_start, row_stop, col_start, col_stop). If `gen_mask` is True, a third element is added the tuple, which is the boolean mask for the inner/outer window. ''' (R, C) = image_shape[:2] if isinstance(inner, (list, tuple)): (hi, wi) = inner[:] else: (hi, wi) = (inner, inner) if isinstance(outer, (list, tuple)): (ho, wo) = outer[:] else: (ho, wo) = (outer, outer) if wi > wo or hi > ho: raise ValueError('Inner window dimensions must be smaller than outer.') (ai, bi) = (hi // 2, wi // 2) (ao, bo) = (ho // 2, wo // 2) def create_mask(i, j, gen_mask=False): # Inner window inner_imin = i - ai inner_imax = inner_imin + hi if inner_imin < 0: inner_imax = hi inner_imin = 0 elif inner_imax > R: inner_imax = R inner_imin = R - hi inner_jmin = j - bi inner_jmax = inner_jmin + wi if inner_jmin < 0: inner_jmax = wi inner_jmin = 0 elif inner_jmax > C: inner_jmax = C inner_jmin = C - wi # Outer window outer_imin = i - ao outer_imax = outer_imin + ho if outer_imin < 0: outer_imax = ho outer_imin = 0 elif outer_imax > R: outer_imax = R outer_imin = R - ho outer_jmin = j - bo outer_jmax = outer_jmin + wo if outer_jmin < 0: outer_jmax = wo outer_jmin = 0 elif outer_jmax > C: outer_jmax = C outer_jmin = C - wo inner = (inner_imin, inner_imax, inner_jmin, inner_jmax) outer = (outer_imin, outer_imax, outer_jmin, outer_jmax) if not gen_mask: return (inner, outer) mask = np.zeros((ho, wo), dtype=np.bool) mask[inner_imin - outer_imin : inner_imax - outer_imin, inner_jmin - outer_jmin : inner_jmax - outer_jmin] = True return (inner, outer, mask) return create_mask def map_class_ids(src_class_image, dest_class_image, unlabeled=None): '''Create a mapping between class labels in two classification images. Running a classification algorithm (particularly an unsupervised one) multiple times on the same image can yield similar results but with different class labels (indices) for the same classes. This function produces a mapping of class indices from one classification image to another by finding class indices that share the most pixels between the two classification images. Arguments: `src_class_image` (ndarray): An MxN integer array of class indices. The indices in this array will be mapped to indices in `dest_class_image`. `dest_class_image` (ndarray): An MxN integer array of class indices. `unlabeled` (int or array of ints): If this argument is provided, all pixels (in both images) will be ignored when counting coincident pixels to determine the mapping. If mapping a classification image to a ground truth image that has a labeled background value, set `unlabeled` to that value. Return Value: A dictionary whose keys are class indices from `src_class_image` and whose values are class indices from `dest_class_image`. .. 
seealso:: :func:`map_classes` ''' src_ids = list(set(src_class_image.ravel())) dest_ids = list(set(dest_class_image.ravel())) cmap = {} if unlabeled is not None: if isinstance(unlabeled, int): unlabeled = [unlabeled] for i in unlabeled: if i in src_ids: src_ids.remove(i) cmap[i] = i if i in dest_ids: dest_ids.remove(i) else: unlabeled = [] N_src = len(src_ids) N_dest = len(dest_ids) # Create matrix of coincidence counts between classes in src and dest. matches = np.zeros((N_src, N_dest), np.uint16) for i in range(N_src): src_is_i = (src_class_image == src_ids[i]) for j in range(N_dest): matches[i, j] = np.sum(np.logical_and(src_is_i, dest_class_image == dest_ids[j])) unmapped = set(src_ids) dest_available = set(dest_ids) while len(unmapped) > 0: (i, j) = tuple(np.argwhere(matches == np.max(matches))[0]) mmax = matches[i, j] if mmax == 0: # Nothing left to map. Pick unused indices from dest_class_image for (old, new) in zip(sorted(unmapped), sorted(dest_available)): cmap[old] = new unmapped.remove(old) dest_available.remove(new) for old in unmapped: # The list of target classes has been exhausted. Pick the # smallest dest value that isn't already used. def next_id(): for ii in itertools.count(): if ii not in unlabeled and ii not in cmap.values(): return ii cmap[old] = next_id() break cmap[src_ids[i]] = dest_ids[j] unmapped.remove(src_ids[i]) dest_available.remove(dest_ids[j]) matches[i, :] = 0 matches[:, j] = 0 return cmap def map_classes(class_image, class_id_map, allow_unmapped=False): '''Modifies class indices according to a class index mapping. Arguments: `class_image`: (ndarray): An MxN array of integer class indices. `class_id_map`: (dict): A dict whose keys are indices from `class_image` and whose values are new values for the corresponding indices. This value is usually the output of :func:`map_class_ids`. `allow_unmapped` (bool, default False): A flag indicating whether class indices can appear in `class_image` without a corresponding key in `class_id_map`. If this value is False and an index in the image is found without a mapping key, a :class:`ValueError` is raised. If True, the unmapped index will appear unmodified in the output image. Return Value: An integer-valued ndarray with same shape as `class_image` Example: >>> m = spy.map_class_ids(result, gt, unlabeled=0) >>> result_mapped = spy.map_classes(result, m) .. seealso:: :func:`map_class_ids` ''' if not allow_unmapped \ and not set(class_id_map.keys()).issuperset(set(class_image.ravel())): raise ValueError('`src` has class values with no mapping key') mapped = np.array(class_image) for (i, j) in class_id_map.items(): mapped[class_image == i] = j return mapped def expand_binary_mask_for_window(mask, height, width): '''Returns a new mask including window around each pixel in source mask. Arguments: `mask` (2D ndarray): An ndarray whose non-zero elements define a mask. `height` (int): Height of the window. `width` (int): Width of the window Returns a new mask of ones and zeros with same shape as `mask`. For each non-zero element in mask, the returned mask will contain a value of one for all pixels in the `height`x`width` window about the pixel and zeros elsewhere. 
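    Example (an illustrative sketch)::

        >>> mask = np.zeros((5, 5), dtype=int)
        >>> mask[2, 2] = 1
        >>> m = expand_binary_mask_for_window(mask, 3, 3)
        >>> int(m.sum())    # a 3x3 window of ones is placed around (2, 2)
        9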
''' m = np.zeros_like(mask) (mask_height, mask_width) = mask.shape for (i, j) in iterator_ij(mask): (r0, r1, c0, c1) = get_window_bounds_clipped(mask_height, mask_width, height, width, i, j) m[r0:r1, c0:c1] = 1 return m spectral-0.22.4/spectral/algorithms/spymath.py000066400000000000000000000055301412674721200214520ustar00rootroot00000000000000''' Miscellaneous math functions. ''' from __future__ import absolute_import, division, print_function, unicode_literals import numpy as np def matrix_sqrt(X=None, symmetric=False, inverse=False, eigs=None): '''Returns the matrix square root of X. Arguments: `X` (square class::`numpy.ndarrray`) `symmetric` (bool, default False): If True, `X` is assumed to be symmetric, which speeds up calculation of the square root. `inverse` (bool, default False): If True, computes the matrix square root of inv(X). `eigs` (2-tuple): `eigs` must be a 2-tuple whose first element is an array of eigenvalues and whose second element is an ndarray of eigenvectors (individual eigenvectors are in columns). If this argument is provided, computation of the matrix square root is much faster. If this argument is provided, the `X` argument is ignored (in this case, it can be set to None). Returns a class::`numpy.ndarray` `S`, such that S.dot(S) = X ''' if eigs is not None: (vals, V) = eigs else: (vals, V) = np.linalg.eig(X) k = len(vals) if inverse is False: SRV = np.diag(np.sqrt(vals)) else: SRV = np.diag(1. / np.sqrt(vals)) if symmetric: return V.dot(SRV).dot(V.T) else: return V.dot(SRV).dot(np.linalg.inv(V)) def get_histogram_cdf_points(data, cdf_vals, ignore=None, mask=None): '''Returns input values corresponding to the data's CDF values. Arguments: `data` (ndarray): The data for which to determine the CDF values `cdf_vals` (sequence of floats): A sequence defining the CDF values for which the values of `data` should be returned. Each value should be in the range [0, 1]. For example, to get the values of `data` corresponding to the 1% lower tail and 5% upper tail, this argument would be (0.01, 0.95). `ignore` (numeric, default `None`): A scalar value that should be ignored when computing histogram points (e.g., a value that indicates bad data). If this valus is not specified, all data are used. Return value: A list specifying the values in `data` that correspond to the associated CDF values in `cdf_vals`. ''' data = data.ravel() if mask is not None: data = data[mask.ravel() != 0] if len(data) == 0: raise Exception('All pixels are masked.') if ignore is not None and ignore in data: data = data[np.where(data != ignore)] if len(data) == 0: raise Exception('No data to display after masking and ignoring.') isort = np.argsort(data) N = len(data) return [data[isort[int(x * (N - 1))]] for x in cdf_vals] spectral-0.22.4/spectral/algorithms/transforms.py000066400000000000000000000122471412674721200221660ustar00rootroot00000000000000''' Base classes for various types of transforms. ''' from __future__ import absolute_import, division, print_function, unicode_literals import collections import numpy as np class LinearTransform: '''A callable linear transform object. In addition to the __call__ method, which applies the transform to given, data, a LinearTransform object also has the following members: `dim_in` (int): The expected length of input vectors. This will be `None` if the input dimension is unknown (e.g., if the transform is a scalar). `dim_out` (int): The length of output vectors (after linear transformation). 
This will be `None` if the input dimension is unknown (e.g., if the transform is a scalar). `dtype` (numpy dtype): The numpy dtype for the output ndarray data. ''' def __init__(self, A, **kwargs): '''Arguments: `A` (:class:`~numpy.ndarrray`): An (J,K) array to be applied to length-K targets. Keyword Argments: `pre` (scalar or length-K sequence): Additive offset to be applied prior to linear transformation. `post` (scalar or length-J sequence): An additive offset to be applied after linear transformation. `dtype` (numpy dtype): Explicit type for transformed data. ''' self._pre = kwargs.get('pre', None) self._post = kwargs.get('post', None) A = np.array(A, copy=True) if A.ndim == 0: # Do not know input/ouput dimensions self._A = A (self.dim_out, self.dim_in) = (None, None) else: if len(A.shape) == 1: self._A = A.reshape(((1,) + A.shape)) else: self._A = A (self.dim_out, self.dim_in) = self._A.shape self.dtype = kwargs.get('dtype', self._A.dtype) def __call__(self, X): '''Applies the linear transformation to the given data. Arguments: `X` (:class:`~numpy.ndarray` or object with `transform` method): If `X` is an ndarray, it is either an (M,N,K) array containing M*N length-K vectors to be transformed or it is an (R,K) array of length-K vectors to be transformed. If `X` is an object with a method named `transform` the result of passing the `LinearTransform` object to the `transform` method will be returned. Returns an (M,N,J) or (R,J) array, depending on shape of `X`, where J is the length of the first dimension of the array `A` passed to __init__. ''' if not isinstance(X, np.ndarray): if hasattr(X, 'transform') and isinstance(X.transform, collections.Callable): return X.transform(self) else: raise TypeError('Unable to apply transform to object.') shape = X.shape if len(shape) == 3: X = X.reshape((-1, shape[-1])) if self._pre is not None: X = X + self._pre Y = np.dot(self._A, X.T).T if self._post is not None: Y += self._post return Y.reshape((shape[:2] + (-1,))).squeeze().astype(self.dtype) else: if self._pre is not None: X = X + self._pre Y = np.dot(self._A, X.T).T if self._post is not None: Y += self._post return Y.astype(self.dtype) def chain(self, transform): '''Chains together two linear transforms. If the transform `f1` is given by .. math:: F_1(X) = A_1(X + b_1) + c_1 and `f2` by .. math:: F_2(X) = A_2(X + b_2) + c_2 then `f1.chain(f2)` returns a new LinearTransform, `f3`, whose output is given by .. math:: F_3(X) = F_2(F_1(X)) ''' if isinstance(transform, np.ndarray): transform = LinearTransform(transform) if self.dim_in is not None and transform.dim_out is not None \ and self.dim_in != transform.dim_out: raise Exception('Input/Output dimensions of chained transforms' 'do not match.') # Internally, the new transform is computed as: # Y = f2._A.dot(f1._A).(X + f1._pre) + f2._A.(f1._post + f2._pre) + f2._post # However, any of the _pre/_post members could be `None` so that needs # to be checked. 
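        # Reading of the algebra implemented below (with `None` members
        # dropping out of the corresponding term):
        #     A_new    = self._A.dot(transform._A)
        #     pre_new  = transform._pre
        #     post_new = self._A.dot(transform._post + self._pre) + self._post
        # so the returned transform applies `transform` first and then applies
        # `self` to its output.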
if transform._pre is not None: pre = np.array(transform._pre) else: pre = None post = None if transform._post is not None: post = np.array(transform._post) if self._pre is not None: post += self._pre elif self._pre is not None: post = np.array(self._pre) if post is not None: post = self._A.dot(post) if self._post: post += self._post if post is not None: post = np.array(post) A = np.dot(self._A, transform._A) return LinearTransform(A, pre=pre, post=post) spectral-0.22.4/spectral/config.py000066400000000000000000000133231412674721200170600ustar00rootroot00000000000000''' Code for package-level customization. ''' from __future__ import absolute_import, division, print_function, unicode_literals import numpy as np class SpySettings: '''Run-time settings for the `spectral` module. After importing `spectral`, the settings object is referenced as `spectral.settings`. Noteworthy members: `WX_GL_DEPTH_SIZE` (integer, default 24): Sets the depth (in number of bits) for the OpenGL depth buffer. If calls to `view_cube` or `view_nd` result in windows with blank canvases, try reducing this value. `envi_support_nonlowercase_params` (bool, default False) By default, ENVI headers are read with parameter names converted to lower case. If this attribute is set to True, parameters will be read with original capitalization retained. `show_progress` (bool, default True): Indicates whether long-running algorithms should display progress to sys.stdout. It can be useful to set this value to False when SPy is embedded in another application (e.g., IPython Notebook). `imshow_figure_size` (2-tuple of integers, default `None`): Width and height (in inches) of windows opened with `imshow`. If this value is `None`, matplotlib's default size is used. `imshow_background_color` (3-tuple of integers, default (0,0,0)): Default color to use for masked pixels in `imshow` displays. `imshow_interpolation` (str, default `None`): Pixel interpolation to be used in imshow windows. If this value is `None`, matplotlib's default interpolation is used. Note that zoom windows always use "nearest" interpolation. `imshow_stretch`: Default RGB linear color stretch to perform. `imshow_stretch_all`: If True, each color channel limits are determined independently. `imshow_zoom_figure_width` (int, default `None`): Width of zoom windows opened from an imshow window. Since zoom windows are always square, this is also the window height. If this value is `None`, matplotlib's default window size is used. `imshow_zoom_pixel_width` (int, default 50): Number of source image pixel rows and columns to display in a zoom window. `imshow_float_cmap` (str, default "gray"): imshow color map to use with floating point arrays. `imshow_class_alpha` (float, default 0.5): alpha blending value to use for imshow class overlays `imshow_enable_rectangle_selector` (bool, default True): Whether to create the rectangle selection tool that enables interactive image pixel class labeling. On some OS/backend combinations, an exception may be raised when this object is created so disabling it allows imshow windows to be created without using the selector tool. `imshow_disable_mpl_callbacks` (bool, default True): If True, several matplotlib keypress event callbacks will be disabled to prevent conflicts with callbacks from SPy. The matplotlib callbacks can be set back to their defaults by calling `matplotlib.rcdefaults()`. 
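    Example (illustrative; the values chosen below are arbitrary)::

        >>> import spectral
        >>> spectral.settings.show_progress = False
        >>> spectral.settings.imshow_background_color = (20, 20, 20)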
''' viewer = None plotter = None # If START_WX_APP is True and there is no current wx.App object when a # GUI function is called, then an app object will be created. START_WX_APP = True # Parameter used by GLCanvas objects in view_cube and view_nd. If the # canvas does not render, try reducing this value (e.g., 16). WX_GL_DEPTH_SIZE = 24 envi_support_nonlowercase_params = False # Should algorithms show completion progress of algorithms? show_progress = True # imshow settings imshow_figure_size = None imshow_background_color = (0, 0, 0) imshow_interpolation = None imshow_stretch = (0.0, 1.0) imshow_stretch_all = True imshow_zoom_figure_width = None imshow_zoom_pixel_width = 50 imshow_float_cmap = 'gray' imshow_class_alpha = 0.5 imshow_enable_rectangle_selector = True imshow_disable_mpl_callbacks = True # Default color table spy_colors = np.array([[0, 0, 0], [255, 0, 0], [0, 255, 0], [0, 0, 255], [255, 255, 0], [255, 0, 255], [0, 255, 255], [200, 100, 0], [0, 200, 100], [100, 0, 200], [200, 0, 100], [100, 200, 0], [0, 100, 200], [150, 75, 75], [75, 150, 75], [75, 75, 150], [255, 100, 100], [100, 255, 100], [100, 100, 255], [255, 150, 75], [75, 255, 150], [150, 75, 255], [50, 50, 50], [100, 100, 100], [150, 150, 150], [200, 200, 200], [250, 250, 250], [100, 0, 0], [200, 0, 0], [0, 100, 0], [0, 200, 0], [0, 0, 100], [0, 0, 200], [100, 100, 0], [200, 200, 0], [100, 0, 100], [200, 0, 200], [0, 100, 100], [0, 200, 200]], np.int16) spectral-0.22.4/spectral/database/000077500000000000000000000000001412674721200170035ustar00rootroot00000000000000spectral-0.22.4/spectral/database/__init__.py000066400000000000000000000002751412674721200211200ustar00rootroot00000000000000from __future__ import absolute_import, division, print_function, unicode_literals from .aster import AsterDatabase from .ecostress import EcostressDatabase from .usgs import USGSDatabase spectral-0.22.4/spectral/database/aster.py000066400000000000000000000364041412674721200205020ustar00rootroot00000000000000''' Code for reading and managing ASTER spectral library data. ''' from __future__ import absolute_import, division, print_function, unicode_literals from spectral.utilities.python23 import IS_PYTHON3, tobytes, frombytes from .spectral_database import SpectralDatabase if IS_PYTHON3: readline = lambda fin: fin.readline() open_file = lambda filename: open(filename, encoding='iso-8859-1') else: readline = lambda fin: fin.readline().decode('iso-8859-1') open_file = lambda filename: open(filename) table_schemas = [ 'CREATE TABLE Samples (SampleID INTEGER PRIMARY KEY, Name TEXT, Type TEXT, Class TEXT, SubClass TEXT, ' 'ParticleSize TEXT, SampleNum TEXT, Owner TEXT, Origin TEXT, Phase TEXT, Description TEXT)', 'CREATE TABLE Spectra (SpectrumID INTEGER PRIMARY KEY, SampleID INTEGER, SensorCalibrationID INTEGER, ' 'Instrument TEXT, Environment TEXT, Measurement TEXT, ' 'XUnit TEXT, YUnit TEXT, MinWavelength FLOAT, MaxWavelength FLOAT, ' 'NumValues INTEGER, XData BLOB, YData BLOB)', ] arraytypecode = chr(ord('f')) # These files contained malformed signature data and will be ignored. 
bad_files = [ 'jhu.nicolet.mineral.silicate.tectosilicate.fine.albite1.spectrum.txt', 'usgs.perknic.rock.igneous.mafic.colid.me3.spectrum.txt' ] def read_pair(fin, num_lines=1): '''Reads a colon-delimited attribute-value pair from the file stream.''' s = '' for i in range(num_lines): s += " " + readline(fin).strip() return [x.strip().lower() for x in s.split(':')] class Signature: '''Object to store sample/measurement metadata, as well as wavelength-signatrure vectors.''' def __init__(self): self.sample = {} self.measurement = {} def read_aster_file(filename): '''Reads an ASTER 2.x spectrum file.''' fin = open_file(filename) s = Signature() # Number of lines per metadata attribute value lpv = [1] * 8 + [2] + [6] # A few files have an additional "Colleted by" sample metadata field, which # sometimes affects the number of header lines haveCollectedBy = False for i in range(30): line = readline(fin).strip() if line.find('Collected by:') >= 0: haveCollectedBy = True collectedByLineNum = i if line.startswith('Description:'): descriptionLineNum = i if line.startswith('Measurement:'): measurementLineNum = i if haveCollectedBy: lpv = [1] * 10 + [measurementLineNum - descriptionLineNum] # Read sample metadata fin.seek(0) for i in range(len(lpv)): pair = read_pair(fin, lpv[i]) s.sample[pair[0].lower()] = pair[1] # Read measurement metadata lpv = [1] * 8 + [2] for i in range(len(lpv)): pair = read_pair(fin, lpv[i]) if len(pair) < 2: print(pair) s.measurement[pair[0].lower()] = pair[1] # Read signature spectrum pairs = [] for line in fin.readlines(): line = line.strip() if len(line) == 0: continue pair = line.split() nItems = len(pair) # Try to handle invalid values on signature lines if nItems == 1: # print 'single item (%s) on signature line, %s' \ # % (pair[0], filename) continue elif nItems > 2: print('more than 2 values on signature line,', filename) continue try: x = float(pair[0]) except: print('corrupt signature line,', filename) if x == 0: # print 'Zero wavelength value', filename continue elif x < 0: print('Negative wavelength value,', filename) continue pairs.append(pair) [x, y] = [list(v) for v in zip(*pairs)] # Make sure wavelengths are ascending if float(x[0]) > float(x[-1]): x.reverse() y.reverse() s.x = [float(val) for val in x] s.y = [float(val) for val in y] s.measurement['first x value'] = x[0] s.measurement['last x value'] = x[-1] s.measurement['number of x values'] = len(x) fin.close() return s class AsterDatabase(SpectralDatabase): '''A relational database to manage ASTER spectral library data.''' schemas = table_schemas def _add_sample(self, name, sampleType, sampleClass, subClass, particleSize, sampleNumber, owner, origin, phase, description): sql = '''INSERT INTO Samples (Name, Type, Class, SubClass, ParticleSize, SampleNum, Owner, Origin, Phase, Description) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)''' self.cursor.execute(sql, (name, sampleType, sampleClass, subClass, particleSize, sampleNumber, owner, origin, phase, description)) rowId = self.cursor.lastrowid self.db.commit() return rowId def _add_signature( self, sampleID, calibrationID, instrument, environment, measurement, xUnit, yUnit, minWavelength, maxWavelength, xData, yData): import sqlite3 import array sql = '''INSERT INTO Spectra (SampleID, SensorCalibrationID, Instrument, Environment, Measurement, XUnit, YUnit, MinWavelength, MaxWavelength, NumValues, XData, YData) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)''' xBlob = sqlite3.Binary(tobytes(array.array(arraytypecode, xData))) yBlob = 
sqlite3.Binary(tobytes(array.array(arraytypecode, yData))) numValues = len(xData) self.cursor.execute( sql, ( sampleID, calibrationID, instrument, environment, measurement, xUnit, yUnit, minWavelength, maxWavelength, numValues, xBlob, yBlob)) rowId = self.cursor.lastrowid self.db.commit() return rowId @classmethod def create(cls, filename, aster_data_dir=None): '''Creates an ASTER relational database by parsing ASTER data files. Arguments: `filename` (str): Name of the new sqlite database file to create. `aster_data_dir` (str): Path to the directory containing ASTER library data files. If this argument is not provided, no data will be imported. Returns: An :class:`~spectral.database.AsterDatabase` object. Example:: >>> AsterDatabase.create("aster_lib.db", "/CDROM/ASTER2.0/data") This is a class method (it does not require instantiating an AsterDatabase object) that creates a new database by parsing all of the files in the ASTER library data directory. Normally, this should only need to be called once. Subsequently, a corresponding database object can be created by instantiating a new AsterDatabase object with the path the database file as its argument. For example:: >>> from spectral.database.aster import AsterDatabase >>> db = AsterDatabase("aster_lib.db") ''' import os if os.path.isfile(filename): raise Exception('Error: Specified file already exists.') db = cls() db._connect(filename) for schema in cls.schemas: db.cursor.execute(schema) if aster_data_dir: db._import_files(aster_data_dir) return db def __init__(self, sqlite_filename=None): '''Creates a database object to interface an existing database. Arguments: `sqlite_filename` (str): Name of the database file. If this argument is not provided, an interface to a database file will not be established. Returns: An :class:`~spectral.AsterDatabase` connected to the database. ''' from spectral.io.spyfile import find_file_path if sqlite_filename: self._connect(find_file_path(sqlite_filename)) else: self.db = None self.cursor = None def read_file(self, filename): return read_aster_file(filename) def _import_files(self, data_dir, ignore=bad_files): '''Read each file in the ASTER library and convert to AVIRIS bands.''' from glob import glob import numpy import os if not os.path.isdir(data_dir): raise Exception('Error: Invalid directory name specified.') if ignore is not None: filesToIgnore = [data_dir + '/' + f for f in ignore] else: filesToIgnore = [] numFiles = 0 numIgnored = 0 sigID = 1 class Sig: pass sigs = [] for f in glob(data_dir + '/*spectrum.txt'): if f in filesToIgnore: numIgnored += 1 continue print('Importing %s.' % f) numFiles += 1 sig = self.read_file(f) s = sig.sample if s['particle size'].lower == 'liquid': phase = 'liquid' else: phase = 'solid' if 'sample no.' 
in s: sampleNum = s['sample no.'] else: sampleNum = '' id = self._add_sample( s['name'], s['type'], s['class'], s[ 'subclass'], s['particle size'], sampleNum, s['owner'], s['origin'], phase, s['description']) instrument = os.path.basename(f).split('.')[1] environment = 'lab' m = sig.measurement # Correct numerous mispellings of "reflectance" and "transmittance" yUnit = m['y units'] if yUnit.find('reflectence') > -1: yUnit = 'reflectance (percent)' elif yUnit.find('trans') == 0: yUnit = 'transmittance (percent)' measurement = m['measurement'] if measurement[0] == 't': measurement = 'transmittance' self._add_signature(id, -1, instrument, environment, measurement, m['x units'], yUnit, m['first x value'], m['last x value'], sig.x, sig.y) if numFiles == 0: print('No data files were found in directory "%s".' \ % data_dir) else: print('Processed %d files.' % numFiles) if numIgnored > 0: print('Ignored the following %d bad files:' % (numIgnored)) for f in filesToIgnore: print('\t' + f) return sigs def get_spectrum(self, spectrumID): '''Returns a spectrum from the database. Usage: (x, y) = aster.get_spectrum(spectrumID) Arguments: `spectrumID` (int): The **SpectrumID** value for the desired spectrum from the **Spectra** table in the database. Returns: `x` (list): Band centers for the spectrum. `y` (list): Spectrum data values for each band. Returns a pair of vectors containing the wavelengths and measured values values of a measurment. For additional metadata, call "get_signature" instead. ''' import array query = '''SELECT XData, YData FROM Spectra WHERE SpectrumID = ?''' result = self.cursor.execute(query, (spectrumID,)) rows = result.fetchall() if len(rows) < 1: raise 'Measurement record not found' x = array.array(arraytypecode) frombytes(x, rows[0][0]) y = array.array(arraytypecode) frombytes(y, rows[0][1]) return (list(x), list(y)) def get_signature(self, spectrumID): '''Returns a spectrum with some additional metadata. Usage:: sig = aster.get_signature(spectrumID) Arguments: `spectrumID` (int): The **SpectrumID** value for the desired spectrum from the **Spectra** table in the database. Returns: `sig` (:class:`~spectral.database.aster.Signature`): An object with the following attributes: ============== ===== ======================================== Attribute Type Description ============== ===== ======================================== measurement_id int SpectrumID value from Spectra table sample_name str **Sample** from the **Samples** table sample_id int **SampleID** from the **Samples** table x list list of band center wavelengths y list list of spectrum values for each band ============== ===== ======================================== ''' import array # Retrieve spectrum from Spectra table query = '''SELECT Samples.Name, Samples.SampleID, XData, YData FROM Samples, Spectra WHERE Samples.SampleID = Spectra.SampleID AND Spectra.SpectrumID = ?''' result = self.cursor.execute(query, (spectrumID,)) results = result.fetchall() if len(results) < 1: raise "Measurement record not found" sig = Signature() sig.measurement_id = spectrumID sig.sample_name = results[0][0] sig.sample_id = results[0][1] x = array.array(arraytypecode) frombytes(x, results[0][2]) sig.x = list(x) y = array.array(arraytypecode) frombytes(y, results[0][3]) sig.y = list(y) return sig def create_envi_spectral_library(self, spectrumIDs, bandInfo): '''Creates an ENVI-formatted spectral library for a list of spectra. 
Arguments: `spectrumIDs` (list of ints): List of **SpectrumID** values for of spectra in the "Spectra" table of the ASTER database. `bandInfo` (:class:`~spectral.BandInfo`): The spectral bands to which the original ASTER library spectra will be resampled. Returns: A :class:`~spectral.io.envi.SpectralLibrary` object. The IDs passed to the method should correspond to the SpectrumID field of the ASTER database "Spectra" table. All specified spectra will be resampled to the same discretization specified by the bandInfo parameter. See :class:`spectral.BandResampler` for details on the resampling method used. ''' from spectral.algorithms.resampling import BandResampler from spectral.io.envi import SpectralLibrary import numpy import unicodedata spectra = numpy.empty((len(spectrumIDs), len(bandInfo.centers))) names = [] for i in range(len(spectrumIDs)): sig = self.get_signature(spectrumIDs[i]) resample = BandResampler( sig.x, bandInfo.centers, None, bandInfo.bandwidths) spectra[i] = resample(sig.y) names.append(unicodedata.normalize('NFKD', sig.sample_name). encode('ascii', 'ignore')) header = {} header['wavelength units'] = 'um' header['spectra names'] = names header['wavelength'] = bandInfo.centers header['fwhm'] = bandInfo.bandwidths return SpectralLibrary(spectra, header, {}) spectral-0.22.4/spectral/database/ecostress.py000066400000000000000000000165041412674721200213750ustar00rootroot00000000000000''' Code for reading and managing ECOSTRESS spectral library data. ''' from __future__ import absolute_import, division, print_function, unicode_literals import itertools import logging from spectral.utilities.python23 import IS_PYTHON3 from .aster import AsterDatabase, Signature if IS_PYTHON3: readline = lambda fin: fin.readline() open_file = lambda filename: open(filename, encoding='iso-8859-1') else: readline = lambda fin: fin.readline().decode('iso-8859-1') open_file = lambda filename: open(filename) def read_ecostress_file(filename): '''Reads an ECOSTRESS v1 spectrum file.''' logger = logging.getLogger('spectral') lines = open_file(filename).readlines() if not IS_PYTHON3: lines = [line.decode('iso-8859-1') for line in lines] metaline_to_pair = lambda line: [x.strip() for x in line.split(':', 1)] s = Signature() # Read sample metadata for i in itertools.count(): if lines[i].strip().startswith('Measurement'): break pair = metaline_to_pair(lines[i]) try: s.sample[pair[0].lower()] = pair[1] except: logger.error('Failed to parse line: {}: {}'.format(i, lines[i])) raise # Read measurment metadata for j in itertools.count(i): if len(lines[j].strip()) == 0: break pair = metaline_to_pair(lines[j]) s.measurement[pair[0].lower()] = pair[1] # Read signature spectrum pairs = [] for line in lines[j:]: line = line.strip() if len(line) == 0: continue pair = line.split() nItems = len(pair) # Try to handle invalid values on signature lines if nItems == 1: logger.info('Skipping single item (%s) on signature line for %s', pair[0], filename) continue elif nItems > 2: logger.info('Skipping more than 2 values on signature line for %s', filename) continue try: x = float(pair[0]) except: logger.info('Corrupt signature line in file %s', filename) if x == 0: logger.info('Skipping zero wavelength value in file %s', filename) continue elif x < 0: logger.info('Skipping negative wavelength value in file %s', filename) continue pairs.append(pair) [x, y] = [list(v) for v in zip(*pairs)] # Make sure wavelengths are ascending if float(x[0]) > float(x[-1]): x.reverse() y.reverse() s.x = [float(val) for val in x] s.y = 
[float(val) for val in y] s.measurement['first x value'] = x[0] s.measurement['last x value'] = x[-1] s.measurement['number of x values'] = len(x) return s class EcostressDatabase(AsterDatabase): '''A relational database to manage ECOSTRESS spectral library data.''' @classmethod def create(cls, filename, data_dir=None): '''Creates an ECOSTRESS relational database by parsing ECOSTRESS data files. Arguments: `filename` (str): Name of the new sqlite database file to create. `data_dir` (str): Path to the directory containing ECOSTRESS library data files. If this argument is not provided, no data will be imported. Returns: An :class:`~spectral.database.EcostressDatabase` object. Example:: >>> EcostressDatabase.create("ecostress.db", "./eco_data_ver1/") This is a class method (it does not require instantiating an EcostressDatabase object) that creates a new database by parsing all of the files in the ECOSTRESS library data directory. Normally, this should only need to be called once. Subsequently, a corresponding database object can be created by instantiating a new EcostressDatabase object with the path the database file as its argument. For example:: >>> from spectral.database.ecostress import EcostressDatabase >>> db = EcostressDatabase("~/ecostress.db") ''' import os if os.path.isfile(filename): raise Exception('Error: Specified file already exists.') db = cls() db._connect(filename) for schema in cls.schemas: db.cursor.execute(schema) if data_dir: db._import_files(data_dir) return db def read_file(self, filename): return read_ecostress_file(filename) def _import_files(self, data_dir, ignore=None): '''Import each file from the ECOSTRESS library into the database.''' from glob import glob import numpy import os logger = logging.getLogger('spectral') if not os.path.isdir(data_dir): raise Exception('Error: Invalid directory name specified.') if ignore is not None: filesToIgnore = [data_dir + '/' + f for f in ignore] else: filesToIgnore = [] numFiles = 0 numIgnored = 0 sigID = 1 class Sig: pass sigs = [] for f in glob(data_dir + '/*spectrum.txt'): if f in filesToIgnore: numIgnored += 1 continue logger.info('Importing ECOSTRESS file %s.', f) numFiles += 1 sig = self.read_file(f) s = sig.sample if 'particle size' in s: if s['particle size'].lower == 'liquid': phase = 'liquid' else: phase = 'solid' else: phase = 'unknown' s['particle size'] = 'none' if 'sample no.' 
in s: sampleNum = s['sample no.'] else: sampleNum = '' subclass = s.get('subclass', 'none') if subclass == 'none' and 'genus' in s: subclass = s['genus'] id = self._add_sample(s['name'], s['type'], s['class'], subclass, s['particle size'], sampleNum, s['owner'], s['origin'], phase, s['description']) instrument = os.path.basename(f).split('.')[1] environment = 'lab' m = sig.measurement # Correct numerous mispellings of "reflectance" and "transmittance" yUnit = m['y units'] if yUnit.find('reflectence') > -1: yUnit = 'reflectance (percent)' elif yUnit.find('trans') == 0: yUnit = 'transmittance (percent)' measurement = m['measurement'] if measurement[0] == 't': measurement = 'transmittance' self._add_signature(id, -1, instrument, environment, measurement, m['x units'], yUnit, m['first x value'], m['last x value'], sig.x, sig.y) if numFiles == 0: logger.warning('No ECOSTRESS data files were found in "%s".', data_dir) else: logger.info('Processed %d ECOSTRESS files.', numFiles) if numIgnored > 0: msg = 'Ignored the following %d bad files:' % (numIgnored) for f in filesToIgnore: msg += '\n\t' + f logger.info(msg) return sigs spectral-0.22.4/spectral/database/spectral_database.py000066400000000000000000000046301412674721200230210ustar00rootroot00000000000000 class SpectralDatabase: def _connect(self, sqlite_filename): '''Establishes a connection to the Specbase sqlite database.''' import sqlite3 self.db = sqlite3.connect(sqlite_filename) self.cursor = self.db.cursor() def query(self, sql, args=None): '''Returns the result of an arbitrary SQL statement. Arguments: `sql` (str): An SQL statement to be passed to the database. Use "?" for variables passed into the statement. `args` (tuple): Optional arguments which will replace the "?" placeholders in the `sql` argument. Returns: An :class:`sqlite3.Cursor` object with the query results. Example:: >>> sql = r'SELECT SpectrumID, Name FROM Samples, Spectra ' + ... 'WHERE Spectra.SampleID = Samples.SampleID ' + ... 'AND Name LIKE "%grass%" AND MinWavelength < ?' >>> args = (0.5,) >>> cur = db.query(sql, args) >>> for row in cur: ... print row ... (356, u'dry grass') (357, u'grass') ''' if args: return self.cursor.execute(sql, args) else: return self.cursor.execute(sql) def print_query(self, sql, args=None): '''Prints the text result of an arbitrary SQL statement. Arguments: `sql` (str): An SQL statement to be passed to the database. Use "?" for variables passed into the statement. `args` (tuple): Optional arguments which will replace the "?" placeholders in the `sql` argument. This function performs the same query function as :meth:`spectral.database.SpectralDatabase.query` except query results are printed to **stdout** instead of returning a cursor object. Example: >>> sql = r'SELECT SpectrumID, Name FROM Samples, Spectra ' + ... 'WHERE Spectra.SampleID = Samples.SampleID ' + ... 'AND Name LIKE "%grass%" AND MinWavelength < ?' >>> args = (0.5,) >>> db.print_query(sql, args) 356|dry grass 357|grass ''' ret = self.query(sql, args) for row in ret: print("|".join([str(x) for x in row])) spectral-0.22.4/spectral/database/usgs.py000066400000000000000000000556541412674721200203550ustar00rootroot00000000000000''' Code for reading and managing USGS spectral library data. References: Kokaly, R.F., Clark, R.N., Swayze, G.A., Livo, K.E., Hoefen, T.M., Pearson, N.C., Wise, R.A., Benzel, W.M., Lowers, H.A., Driscoll, R.L., and Klein, A.J., 2017, USGS Spectral Library Version 7: U.S. Geological Survey Data Series 1035, 61 p., https://doi.org/10.3133/ds1035. 
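Example (an illustrative sketch; the file paths and sample ID below are
hypothetical)::

    >>> from spectral.database.usgs import USGSDatabase
    >>> db = USGSDatabase.create('usgs_lib.db', '/path/to/usgs/ASCIIdata')
    >>> (x, y) = db.get_spectrum(1)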
''' from __future__ import absolute_import, division, print_function, unicode_literals from spectral.utilities.python23 import IS_PYTHON3, tobytes, frombytes from .spectral_database import SpectralDatabase import re import logging import sqlite3 import array if IS_PYTHON3: def readline(fin): return fin.readline() def open_file(filename): return open(filename, encoding='iso-8859-1') else: def readline(fin): return fin.readline().decode('iso-8859-1') def open_file(filename): return open(filename) table_schemas = [ 'CREATE TABLE Samples (SampleID INTEGER PRIMARY KEY, LibName TEXT, Record INTEGER, ' 'Description TEXT, Spectrometer TEXT, Purity TEXT, MeasurementType TEXT, Chapter TEXT, FileName TEXT, ' 'AssumedWLSpmeterDataID INTEGER, ' 'NumValues INTEGER, MinValue FLOAT, MaxValue FLOAT, ValuesArray BLOB)', 'CREATE TABLE SpectrometerData (SpectrometerDataID INTEGER PRIMARY KEY, LibName TEXT, ' 'Record INTEGER, MeasurementType TEXT, Unit TEXT, Name TEXT, Description TEXT, FileName TEXT, ' 'NumValues INTEGER, MinValue FLOAT, MaxValue FLOAT, ValuesArray BLOB)' ] arraytypecode = chr(ord('f')) def array_from_blob(blob): a = array.array(arraytypecode) frombytes(a, blob) return a def array_to_blob(arr): return sqlite3.Binary(tobytes(array.array(arraytypecode, arr))) # Actually these are not all spectrometer names, but kind of it. _spectrometer_names = { 'ASD': ['ASD'], 'ASTER': ['ASTER'], 'AVIRIS': ['AVIRIS', 'aviris'], 'BECK': ['BECK'], 'CRISM JOINED MTR3': ['CRISM Bandpass(FWHM) JOINED MTR3', 'CRISM Waves JOINED MTR3', 'CRISM Bandpass JOINED MTR3', 'CRISM JOINED MTR3'], 'CRISM GLOBAL': ['CRISM Bandpass(FWHM) GLOBAL', 'CRISM Wavelengths GLOBAL', 'CRISM Waves GLOBAL', 'CRISM GLOBAL'], 'Hyperion': ['Hyperion'], 'HyMap2': ['HyMap2'], 'Landsat8': ['Landsat8'], 'M3': ['M3'], 'NIC4': ['NIC4'], 'Sentinel2': ['Sentinel2', 'Sentinel-2'], 'VIMS': ['VIMS'], 'WorldView3': ['WorldView3'] } class SpectrometerData: ''' Holds data for spectrometer, from USGS spectral library. ''' def __init__(self, libname, record, measurement_type, unit, spectrometer_name, description, file_name, values): self.libname = libname self.record = record self.measurement_type = measurement_type self.unit = unit self.spectrometer_name = spectrometer_name self.description = description self.file_name = file_name self.values = values def header(self): ''' Returns: String representation of basic meta data. ''' return '{0} Record={1}: {2} {3} {4}'.format(self.libname, self.record, self.measurement, self.description) @ classmethod def read_from_file(cls, filename): ''' Constructs SpectrometerData from file. Arguments: `filename` (str): Path to file containing data. Returns: A `SpectrometerData` constructed from data parsed from file. 
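        Example (an illustrative sketch; the file path is hypothetical)::

            >>> sdata = SpectrometerData.read_from_file(
            ...     'path/to/a/usgs_spectrometer_wavelengths_file.txt')
            >>> nbands = len(sdata.values)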
''' import os logger = logging.getLogger('spectral') with open_file(filename) as f: header_line = readline(f) if not header_line: raise Exception( '{0} has empty header line or no lines at all.'.format(filename)) libname, record, measurement_type, unit, spectrometer_name, description = \ SpectrometerData._parse_header(header_line.strip()) values = [] for line in f: if not line: break try: values.append(float(line.strip())) except: logger.error('In file %s found unparsable line.', filename) file_name = os.path.basename(filename) return cls(libname, record, measurement_type, unit, spectrometer_name, description, file_name, values) @staticmethod def _find_spectrometer_name(header_line): for sname, alt_names in _spectrometer_names.items(): for alt_name in alt_names: if alt_name in header_line: return sname raise Exception( 'Could not find spectrometer for header {0}'.format(header_line)) @staticmethod def _assume_measurement_type(header_line): header_line = header_line.lower() # The order of checking these things is important. if 'wavelength' in header_line or 'waves' in header_line: return 'Wavelengths' if 'bandpass' in header_line or 'fwhm' in header_line or 'bandwidths' in header_line: return 'Bandpass' if 'resolution' in header_line: return 'Resolution' if 'wavenumber' in header_line: return 'Wavenumber' if 'srf' in header_line: return 'SRF' raise Exception( 'Could not assume measurement type for header line {0}'.format(header_line)) @ staticmethod def _assume_unit(header_line, measurement_type): if measurement_type == 'Wavelengths' or measurement_type == 'Bandpass' or measurement_type == 'Resolution': if re.search(r'\bnm\b', header_line) is not None: return 'nanometer' if 'nanometer' in header_line: return 'nanometer' # 'um', 'microns' are usually found in these files, but this is default # anyway. return 'micrometer' elif measurement_type == 'Wavenumber': return 'cm^-1' elif measurement_type == 'SRF': return 'none' else: return 'unknown' @ staticmethod def _parse_header(header_line): # It is difficult to parse this data, # things are separated by spaces, but inside of what should be single datum, # there are spaces, so only human can get it right. elements = header_line.split() libname = elements[0] # From 'Record=1234:' extract 1234. record = int(elements[1].split('=')[1][:-1]) # Join everything after record into description. description = ' '.join(elements[2:]) measurement_type = SpectrometerData._assume_measurement_type( header_line) unit = SpectrometerData._assume_unit(header_line, measurement_type) spectrometer_name = SpectrometerData._find_spectrometer_name( header_line) return libname, record, measurement_type, unit, spectrometer_name, description class SampleData: ''' Holds parsed data for single sample from USGS spectral library. ''' def __init__(self, libname=None, record=None, description=None, spectrometer=None, purity=None, measurement_type=None, chapter=None, file_name=None, values=None): self.libname = libname self.record = record self.description = description self.spectrometer = spectrometer self.purity = purity self.measurement_type = measurement_type self.chapter = chapter self.file_name = file_name self.values = values def header(self): ''' Returns: String representation of basic meta data. 
''' return '{0} Record={1}: {2} {3}{4} {5}'.format(self.libname, self.record, self.description, self.spectrometer, self.purity, self.measurement_type) @staticmethod def _parse_header(header_line): elements = header_line.split() libname = elements[0] # From 'Record=1234:' extract 1234. record = int(elements[1].split('=')[1][:-1]) # Join everything after record into description. description = ' '.join(elements[2:]) # Split 'AVIRIS13aa' into ['', 'AVIRIS13', 'aa', '']. smpurity = re.split('([A-Z0-9]+)([a-z]+)', elements[-2]) # There is case with capital leters like 'NIC4AA' if len(smpurity) == 1: smpurity = re.split('([A-Z]+[0-9])([A-Z]+)', elements[-2]) smpurity[2] = smpurity[2].lower() spectrometer = smpurity[1] purity = smpurity[2] measurement_type = elements[-1] return libname, record, description, spectrometer, purity, measurement_type @classmethod def read_from_file(cls, filename, chapter=None): ''' Constructs SampleData from file. Arguments: `filename` (str): Path to file containing data. Returns: A `SampleData` constructed from data parsed from file. ''' import os logger = logging.getLogger('spectral') with open(filename) as f: header_line = f.readline() if not header_line: raise Exception( '{0} has empty header line or no lines at all.'.format(filename)) libname, record, description, spectrometer, purity, measurement_type = \ SampleData._parse_header(header_line.strip()) values = [] for line in f: if not line: break try: values.append(float(line.strip())) except: logger.error('In file %s found unparsable line.', filename) file_name = os.path.basename(filename) return cls(libname, record, description, spectrometer, purity, measurement_type, chapter, file_name, values) class USGSDatabase(SpectralDatabase): '''A relational database to manage USGS spectral library data.''' schemas = table_schemas def _assume_wavelength_spectrometer_data_id(self, sampleData): # We can't know this for sure, but these heuristics haven't failed so far. # Prepare paramters. # These parameters are mandatory to match. libname = sampleData.libname num_values = len(sampleData.values) # Spectrometer might not match in subdirectories where data is convolved # or resampled. In other directories, without spectrometer there is # few possible choices, so spectrometer isolates the one we need. spectrometer = sampleData.spectrometer logger = logging.getLogger('spectral') # Start with the most specific. query = '''SELECT SpectrometerDataID FROM SpectrometerData WHERE MeasurementType = 'Wavelengths' AND LibName = ? AND NumValues = ? AND Name = ?''' result = self.cursor.execute( query, (libname, num_values, spectrometer)) rows = result.fetchall() if len(rows) == 0: query = '''SELECT SpectrometerDataID FROM SpectrometerData WHERE MeasurementType = 'Wavelengths' AND LibName = ? AND NumValues = ? AND Name LIKE ?''' result = self.cursor.execute( # ASDFR -> ASD, and '%' just to be sure. query, (libname, num_values, spectrometer[:3] + '%')) rows = result.fetchall() if len(rows) >= 1: if len(rows) > 1: logger.warning('Found multiple spectrometers with measurement_type Wavelengths, ' ' LibName %s, NumValues %d and Name %s', libname, num_values, spectrometer) return rows[0][0] # Try to be less specific without spectrometer name. query = '''SELECT SpectrometerDataID FROM SpectrometerData WHERE MeasurementType = 'Wavelengths' AND LibName = ? 
AND NumValues = ?''' result = self.cursor.execute(query, (libname, num_values)) rows = result.fetchall() if len(rows) < 1: raise Exception('Wavelengths for spectrometer not found, for LibName = {0} and NumValues = {1}, from file {2}'.format( libname, num_values, sampleData.file_name)) if len(rows) > 1: logger.warning('Found multiple spectrometers with measurement_type Wavelengths, ' ' LibName %s and NumValues %d, from file %s', libname, num_values, sampleData.file_name) return rows[0][0] def _add_sample_data(self, spdata): sql = '''INSERT INTO Samples (LibName, Record, Description, Spectrometer, Purity, MeasurementType, Chapter, FileName, AssumedWLSpmeterDataID, NumValues, MinValue, MaxValue, ValuesArray) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)''' values = array_to_blob(spdata.values) num_values = len(spdata.values) min_value = min(spdata.values) max_value = max(spdata.values) assumedWLSpmeterDataID = self._assume_wavelength_spectrometer_data_id(spdata) self.cursor.execute(sql, (spdata.libname, spdata.record, spdata.description, spdata.spectrometer, spdata.purity, spdata.measurement_type, spdata.chapter, spdata.file_name, assumedWLSpmeterDataID, num_values, min_value, max_value, values)) rowId = self.cursor.lastrowid self.db.commit() return rowId def _add_spectrometer_data(self, spdata): sql = '''INSERT INTO SpectrometerData (LibName, Record, MeasurementType, Unit, Name, Description, FileName, NumValues, MinValue, MaxValue, ValuesArray) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)''' values = array_to_blob(spdata.values) num_values = len(spdata.values) min_value = min(spdata.values) max_value = max(spdata.values) self.cursor.execute( sql, (spdata.libname, spdata.record, spdata.measurement_type, spdata.unit, spdata.spectrometer_name, spdata.description, spdata.file_name, num_values, min_value, max_value, values)) rowId = self.cursor.lastrowid self.db.commit() return rowId @classmethod def create(cls, filename, usgs_data_dir=None): '''Creates an USGS relational database by parsing USGS data files. Arguments: `filename` (str): Name of the new sqlite database file to create. `usgs_data_dir` (str): Path to the USGS ASCII data directory. This directory should contain subdirectories, which containes chapter directories. E.g. if provided `usgs_data_dir` is '/home/user/usgs/ASCIIdata', then relative path to single sample could be 'ASCIIdata_splib07b/ChapterL_Liquids/splib07b_H2O-Ice_GDS136_77K_BECKa_AREF.txt' If this argument is not provided, no data will be imported. Returns: An :class:`~spectral.database.USGSDatabase` object. Example:: >>> USGSDatabase.create("usgs_lib.db", "/home/user/usgs/ASCIIdata") This is a class method (it does not require instantiating an USGSDatabase object) that creates a new database by parsing files in the USGS library ASCIIdata directory. Normally, this should only need to be called once. Subsequently, a corresponding database object can be created by instantiating a new USGSDatabase object with the path the database file as its argument. For example:: >>> from spectral.database.usgs import USGSDatabase >>> db = USGSDatabase("usgs_lib.db") ''' import os if os.path.isfile(filename): raise Exception('Error: Specified file already exists.') db = cls() db._connect(filename) for schema in cls.schemas: db.cursor.execute(schema) if usgs_data_dir: db._import_files(usgs_data_dir) return db def __init__(self, sqlite_filename=None): '''Creates a database object to interface an existing database. Arguments: `sqlite_filename` (str): Name of the database file. 
If this argument is not provided, an interface to a database file will not be established. Returns: An :class:`~spectral.USGSDatabase` connected to the database. ''' from spectral.io.spyfile import find_file_path if sqlite_filename: self._connect(find_file_path(sqlite_filename)) else: self.db = None self.cursor = None def _import_files(self, data_dir): from glob import glob import numpy import os logger = logging.getLogger('spectral') if not os.path.isdir(data_dir): raise Exception('Error: Invalid directory name specified.') num_sample_files = 0 num_spectrometer_files = 0 num_failed_sample_files = 0 num_failed_spectromter_files = 0 for sublib in os.listdir(data_dir): sublib_dir = os.path.join(data_dir, sublib) if not os.path.isdir(sublib_dir): continue # Process instrument data one by one. for f in glob(sublib_dir + '/*.txt'): logger.info('Importing spectrometer file %s', f) try: spdata = SpectrometerData.read_from_file(f) self._add_spectrometer_data(spdata) num_spectrometer_files += 1 except Exception as e: logger.error('Failed to import spectrometer file %s', f) logger.error(e) num_failed_spectromter_files += 1 # Go into each chapter directory and process individual samples. for chapter in os.listdir(sublib_dir): # Skip errorbars directory. Maybe add support for parsing it later. if chapter == 'errorbars': continue chapter_dir = os.path.join(sublib_dir, chapter) if not os.path.isdir(chapter_dir): continue for f in glob(chapter_dir + '/*.txt'): logger.info('Importing sample file %s', f) try: spdata = SampleData.read_from_file(f, chapter) self._add_sample_data(spdata) num_sample_files += 1 except Exception as e: logger.error( 'Failed to import sample file %s', f) logger.error(e) num_failed_sample_files += 1 logger.info('Imported %d sample files and %d spectrometer files. ' '%d failed sample files, and %d failed spectrometer files.', num_sample_files, num_spectrometer_files, num_failed_sample_files, num_failed_spectromter_files) def get_spectrum(self, sampleID): '''Returns a spectrum from the database. Usage: (x, y) = usgs.get_spectrum(sampleID) Arguments: `sampleID` (int): The **SampleID** value for the desired spectrum from the **Samples** table in the database. Returns: `x` (list): Band centers for the spectrum. This is extraced from assumed spectrometer for given sample. `y` (list): Spectrum data values for each band. Returns a pair of vectors containing the wavelengths and measured values values of a measurment. ''' import array query = '''SELECT ValuesArray, AssumedWLSpmeterDataID FROM Samples WHERE SampleID = ?''' result = self.cursor.execute(query, (sampleID,)) rows = result.fetchall() if len(rows) < 1: raise Exception('Measurement record not found.') y = array_from_blob(rows[0][0]) assumedWLSpmeterDataID = rows[0][1] query = '''SELECT ValuesArray FROM SpectrometerData WHERE SpectrometerDataID = ?''' result = self.cursor.execute( query, (assumedWLSpmeterDataID,)) rows = result.fetchall() if len(rows) < 1: raise Exception('Measurement (wavelengths) record not found.') x = array_from_blob(rows[0][0]) return (list(x), list(y)) def create_envi_spectral_library(self, spectrumIDs, bandInfo): '''Creates an ENVI-formatted spectral library for a list of spectra. Arguments: `spectrumIDs` (list of ints): List of **SampleID** values for of spectra in the "Samples" table of the USGS database. `bandInfo` (:class:`~spectral.BandInfo`): The spectral bands to which the original USGS library spectra will be resampled. Returns: A :class:`~spectral.io.envi.SpectralLibrary` object. 
The IDs passed to the method should correspond to the SampleID field of the USGS database "Samples" table. All specified spectra will be resampled to the same discretization specified by the bandInfo parameter. See :class:`spectral.BandResampler` for details on the resampling method used. Note that expected units for bands are micrometers. ''' from spectral.algorithms.resampling import BandResampler from spectral.io.envi import SpectralLibrary import numpy import unicodedata spectra = numpy.empty((len(spectrumIDs), len(bandInfo.centers))) cursor = self.cursor.execute(''' SELECT a.ValuesArray, b.ValuesArray, a.Description, b.Unit FROM Samples AS a INNER JOIN SpectrometerData AS b ON a.AssumedWLSpmeterDataID = b.SpectrometerDataID WHERE a.SampleID IN ({0})'''.format(','.join(['?']*len(spectrumIDs))), spectrumIDs) names = [] for i, s in enumerate(cursor): y = array_from_blob(s[0]) x = array_from_blob(s[1]) name = s[2] unit = s[3] if unit == 'nanometers': x /= 1000 resample = BandResampler( x, bandInfo.centers, None, bandInfo.bandwidths) spectra[i] = resample(y) names.append(unicodedata.normalize('NFKD', name). encode('ascii', 'ignore')) header = {} header['wavelength units'] = 'um' header['spectra names'] = names header['wavelength'] = bandInfo.centers header['fwhm'] = bandInfo.bandwidths return SpectralLibrary(spectra, header, {}) spectral-0.22.4/spectral/graphics/000077500000000000000000000000001412674721200170375ustar00rootroot00000000000000spectral-0.22.4/spectral/graphics/__init__.py000066400000000000000000000004101412674721200211430ustar00rootroot00000000000000from __future__ import absolute_import, division, print_function, unicode_literals from .graphics import (save_rgb, view, view_indexed, view_cube, view_nd, get_rgb) from .spypylab import imshow, ImageView from .colorscale import ColorScale spectral-0.22.4/spectral/graphics/colorscale.py000066400000000000000000000111111412674721200215320ustar00rootroot00000000000000''' Code for converting pixel data to RGB values. ''' from __future__ import absolute_import, division, print_function, unicode_literals import numpy as np class ColorScale: ''' A color scale class to map scalar values to rgb colors. The class allows associating colors with particular scalar values, setting a background color (for values below threshold), andadjusting the scale limits. The :meth:`__call__` operator takes a scalar input and returns the corresponding color, interpolating between defined colors. ''' def __init__(self, levels, colors, num_tics=0): ''' Creates the ColorScale. Arguments: `levels` (list of numbers): Scalar levels to which the `colors` argument will correspond. `colors` (list of 3-tuples): RGB 3-tuples that define the colors corresponding to `levels`. `num_tics` (int): The total number of colors in the scale, not including the background color. This includes the colors given in the `colors` argument, as well as interpolated color values. If not specified, only the colors in the `colors` argument will be used (i.e., num_tics = len(colors). ''' import numpy as np if len(colors.shape) != 2 or colors.shape[1] != 3: raise 'colors array has invalid shape.' if len(levels) != colors.shape[0]: raise 'Number of scale levels and colors do not match.' if num_tics == 0: num_tics = len(colors) if num_tics < 2: msg = 'There must be at least two tics in the color scale.' 
raise ValueError(msg) # Make sure scale levels are floats if type(levels) in (list, tuple): levels = [float(x) for x in levels] elif isinstance(levels, np.ndarray): levels = levels.astype(float) self.span = levels[-1] - levels[0] self.max = levels[-1] self.min = levels[0] self.tics = np.linspace(self.min, self.max, num_tics) self.colorTics = np.zeros((len(self.tics), 3), int) self.size = len(self.tics) self.bgColor = np.array([0, 0, 0]) j = 1 dcolor = colors[1] - colors[0] dlevel = levels[1] - levels[0] for i in range(len(self.tics)): while self.tics[i] >= levels[j] and j < len(levels) - 1: j += 1 dcolor = colors[j] - colors[j - 1] dlevel = levels[j] - levels[j - 1] self.colorTics[i] = (colors[j - 1] + (self.tics[i] - levels[j - 1]) / dlevel * dcolor).astype(int) def __call__(self, val): '''Returns the scale color associated with the given value.''' if val < self.min: return self.bgColor elif val >= self.max: return self.colorTics[-1] else: return self.colorTics[int((float(val) - self.min) / self.span * self.size)] def set_background_color(self, color): '''Sets RGB color used for values below the scale minimum. Arguments: `color` (3-tuple): An RGB triplet ''' if type(color) in (list, tuple): color = np.array(color) if len(color.shape) != 1 or color.shape[0] != 3: raise 'Color value must be have exactly 3 elements.' self.bgColor = color def set_range(self, min, max): '''Sets the min and max values of the color scale. The distribution of colors within the scale will stretch or shrink accordingly. ''' self.min = min self.max = max self.span = max - min def create_default_color_scale(ntics=0): '''Returns a black-blue-green-red-yellow-white color scale. Arguments: `ntics` (integer): Total number of colors in the scale. If this value is 0, no interpolated colors will be used. ''' mycolors = np.array([[0, 0, 0], [0, 0, 255], [0, 255, 0], [255, 0, 0], [255, 255, 0], [255, 255, 255]]) if ntics != 0 and ntics < len(mycolors): raise ValueError('Any non-zero value of `ntics` must be greater than' ' {}.'.format(len(mycolors))) levels = np.array([0., 10., 20., 30., 40., 50.]) scale = ColorScale(levels, mycolors, ntics) return scale default_color_scale = create_default_color_scale() spectral-0.22.4/spectral/graphics/graphics.py000066400000000000000000000642001412674721200212130ustar00rootroot00000000000000''' Common functions for extracting and manipulating data for graphical display. ''' from __future__ import absolute_import, division, print_function, unicode_literals import io from numbers import Number import numpy as np import sys import time import warnings from ..algorithms.spymath import get_histogram_cdf_points from ..config import spy_colors from ..image import Image from ..spectral import settings class WindowProxy(object): '''Base class for proxy objects to access data from display windows.''' def __init__(self, window): self._window = window class SpyWindow(): def get_proxy(self): return WindowProxy(self) def view(*args, **kwargs): ''' Opens a window and displays a raster greyscale or color image. Usage:: view(source, bands=None, **kwargs) Arguments: `source` (:class:`spectral.Image` or :class:`numpy.ndarray`): Source image data to display. `source` can be and instance of a :class:`spectral.Image` (e.g., :class:`spectral.SpyFile` or :class:`spectral.ImageArray`) or a :class:`numpy.ndarray`. `source` must have shape `MxN` or `MxNxB`. `bands` (3-tuple of ints): Optional list of indices for bands to display in the red, green, and blue channels, respectively. 
Keyword Arguments: `stretch` (bool): If `stretch` evaluates True, the highest value in the data source will be scaled to maximum color channel intensity. `stretch_all` (bool): If `stretch_all` evaluates True, the highest value of the data source in each color channel will be set to maximum intensity. `bounds` (2-tuple of ints): Clips the input data at (lower, upper) values. `title` (str): Text to display in the new window frame. `source` is the data source and can be either a :class:`spectral.Image` object or a numpy array. If `source` has shape `MxN`, the image will be displayed in greyscale. If its shape is `MxNx3`, the three layers/bands will be displayed as the red, green, and blue components of the displayed image, respectively. If its shape is `MxNxB`, where `B > 3`, the first, middle, and last bands will be displayed in the RGB channels, unless `bands` is specified. ''' from .rasterwindow import RasterWindow if not running_ipython(): warn_no_ipython() check_wx_app() rgb = get_rgb(*args, **kwargs) # To plot pixel spectrum on double-click, create a reference # back to the original SpyFile object. if isinstance(args[0], Image): kwargs["data source"] = args[0] if "colors" not in kwargs: rgb = (rgb * 255).astype(np.uint8) else: rgb = rgb.astype(np.uint8) frame = RasterWindow(None, -1, rgb, **kwargs) frame.Raise() frame.Show() return frame.get_proxy() def view_cube(data, *args, **kwargs): '''Renders an interactive 3D hypercube in a new window. Arguments: `data` (:class:`spectral.Image` or :class:`numpy.ndarray`): Source image data to display. `data` can be and instance of a :class:`spectral.Image` (e.g., :class:`spectral.SpyFile` or :class:`spectral.ImageArray`) or a :class:`numpy.ndarray`. `source` must have shape `MxN` or `MxNxB`. Keyword Arguments: `bands` (3-tuple of ints): 3-tuple specifying which bands from the image data should be displayed on top of the cube. `top` (:class:`numpy.ndarray` or :class:`PIL.Image`): Data to display on top of the cube. This will supercede the `bands` keyword. `scale` (:class:`spectral.ColorScale`) A color scale to be used for color in the sides of the cube. If this keyword is not specified, :obj:`spectral.graphics.colorscale.defaultColorScale` is used. `size` (2-tuple of ints): Width and height (in pixels) for initial size of the new window. `background` (3-tuple of floats): Background RGB color of the scene. Each value should be in the range [0, 1]. If not specified, the background will be black. `title` (str): Title text to display in the new window frame. This function opens a new window, renders a 3D hypercube, and accepts keyboard input to manipulate the view of the hypercube. Accepted keyboard inputs are printed to the console output. Focus must be on the 3D window to accept keyboard input. ''' from .hypercube import HypercubeWindow if not running_ipython(): warn_no_ipython() check_wx_app() window = HypercubeWindow(data, None, -1, *args, **kwargs) window.Show() window.Raise() return window.get_proxy() def view_nd(data, *args, **kwargs): ''' Creates a 3D window that displays ND data from an image. Arguments: `data` (:class:`spectral.ImageArray` or :class:`numpy.ndarray`): Source image data to display. `data` can be and instance of a :class:`spectral.ImageArray or a :class:`numpy.ndarray`. `source` must have shape `MxNxB`, where M >= 3. Keyword Arguments: `classes` (:class:`numpy.ndarray`): 2-dimensional array of integers specifying the classes of each pixel in `data`. `classes` must have the same dimensions as the first two dimensions of `data`. 
`features` (list or list of integer lists): This keyword specifies which bands/features from `data` should be displayed in the 3D window. It must be defined as one of the following: #. A length-3 list of integer feature IDs. In this case, the data points will be displayed in the positive x,y,z octant using features associated with the 3 integers. #. A length-6 list of integer feature IDs. In this case, each integer specifies a single feature index to be associated with the coordinate semi-axes x, y, z, -x, -y, and -z (in that order). Each octant will display data points using the features associated with the 3 semi-axes for that octant. #. A length-8 list of length-3 lists of integers. In this case, each length-3 list specfies the features to be displayed in a single octants (the same semi-axis can be associated with different features in different octants). Octants are ordered starting with the postive x,y,z octant and procede counterclockwise around the z-axis, then procede similarly around the negative half of the z-axis. An octant triplet can be specified as None instead of a list, in which case nothing will be rendered in that octant. `labels` (list): List of labels to be displayed next to the axis assigned to a feature. If not specified, the feature index is shown by default. The `str()` function will be called on each item of the list so, for example, a list of wavelengths can be passed as the labels. `size` (2-tuple of ints) Specifies the initial size (pixel rows/cols) of the window. `title` (string) The title to display in the ND window title bar. Returns an NDWindowProxy object with a `classes` member to access the current class labels associated with data points and a `set_features` member to specify which features are displayed. ''' from .ndwindow import NDWindow, validate_args if not running_ipython(): warn_no_ipython() check_wx_app() validate_args(data, *args, **kwargs) window = NDWindow(data, None, -1, *args, **kwargs) window.Show() window.Raise() return window.get_proxy() def view_indexed(*args, **kwargs): ''' Opens a window and displays a raster image for the provided color map data. Usage:: view_indexed(data, **kwargs) Arguments: `data` (:class:`numpy.ndarray`): An `MxN` array of integer values that correspond to colors in a color palette. Keyword Arguments: `colors` (list of 3-tuples of ints): This parameter provides an alternate color map to use for display. The parameter is a list of 3-tuples defining RGB values, where R, G, and B are in the range [0-255]. `title` (str): Text to display in the new window frame. The default color palette used is defined by :obj:`spectral.spy_colors`. ''' if not running_ipython(): warn_no_ipython() check_wx_app() if 'colors' not in kwargs: kwargs['colors'] = spy_colors return view(*args, **kwargs) def imshow(data, bands=None, **kwargs): '''A wrapper around matplotlib's imshow for multi-band images. Arguments: `data` (SpyFile or ndarray): Can have shape (R, C) or (R, C, B). `bands` (tuple of integers, optional) If `bands` has 3 values, the bands specified are extracted from `data` to be plotted as the red, green, and blue colors, respectively. If it contains a single value, then a single band will be extracted from the image. Keyword Arguments: show_xaxis (bool, default True): Indicates whether to display x-axis ticks and labels. show_yaxis (bool, default True): Indicates whether to display y-axis ticks and labels. This function is a wrapper around :func:`~spectral.graphics.graphics.get_rgb` and matplotlib's imshow. 
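    Example (a minimal sketch; `img` is assumed to be an open image or
    ndarray and the band indices are arbitrary):

        >>> ax = imshow(img, (29, 19, 9), stretch=0.02, show_xaxis=False)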
All keyword arguments other than those described above are passed on to the wrapped functions. This function defaults the color scale (imshow's "cmap" keyword) to "gray". To use imshow's default color scale, call this function with keyword `cmap=None`. ''' import matplotlib.pyplot as plt show_xaxis = True show_yaxis = True if 'show_xaxis' in kwargs: show_xaxis = kwargs.pop('show_xaxis') if 'show_yaxis' in kwargs: show_yaxis = kwargs.pop('show_yaxis') rgb_kwargs = {} for k in ['stretch', 'stretch_all', 'bounds']: if k in kwargs: rgb_kwargs[k] = kwargs.pop(k) imshow_kwargs = {'cmap': 'gray'} imshow_kwargs.update(kwargs) rgb = get_rgb(data, bands, **rgb_kwargs) # Allow matplotlib.imshow to apply a color scale to single-band image. if len(data.shape) == 2: rgb = rgb[:, :, 0] ax = plt.imshow(rgb, **imshow_kwargs) if show_xaxis == False: plt.gca().xaxis.set_visible(False) if show_yaxis == False: plt.gca().yaxis.set_visible(False) return ax def make_pil_image(*args, **kwargs): '''Creates a PIL Image object. USAGE: make_pil_image(source [, bands] [stretch=True] [stretch_all=False], [bounds = (lower, upper)] ) See `get_rgb` for description of arguments. ''' try: from PIL import Image, ImageDraw except ImportError: import Image import ImageDraw rgb = get_rgb(*args, **kwargs) rgb = (rgb * 255).astype(np.ubyte) img = Image.fromarray(rgb) return img def save_rgb(filename, data, bands=None, **kwargs): ''' Saves a viewable image to a JPEG (or other format) file. Usage:: save_rgb(filename, data, bands=None, **kwargs) Arguments: `filename` (str): Name of image file to save (e.g. "rgb.jpg") `data` (:class:`spectral.Image` or :class:`numpy.ndarray`): Source image data to display. `data` can be and instance of a :class:`spectral.Image` (e.g., :class:`spectral.SpyFile` or :class:`spectral.ImageArray`) or a :class:`numpy.ndarray`. `data` must have shape `MxN` or `MxNxB`. If thes shape is `MxN`, the image will be saved as greyscale (unless keyword `colors` is specified). If the shape is `MxNx3`, it will be interpreted as three `MxN` images defining the R, G, and B channels respectively. If `B > 3`, the first, middle, and last images in `data` will be used, unless `bands` is specified. `bands` (3-tuple of ints): Optional list of indices for bands to use in the red, green, and blue channels, respectively. Keyword Arguments: `format` (str): The image file format to create. Must be a format recognized by :mod:`PIL` (e.g., 'png', 'tiff', 'bmp'). If `format` is not provided, 'jpg' is assumed. See :func:`~spectral.graphics.graphics.get_rgb` for descriptions of additional keyword arguments. Examples: Save a color view of an image by specifying RGB band indices:: save_image('rgb.jpg', img, [29, 19, 9]]) Save the same image as **png**:: save_image('rgb.png', img, [29, 19, 9]], format='png') Save classification results using the default color palette (note that the color palette must be passed explicitly for `clMap` to be interpreted as a color map):: save_image('results.jpg', clMap, colors=spectral.spy_colors) ''' kwargs = kwargs.copy() fmt = kwargs.pop('format', None) im = make_pil_image(*(data, bands), **kwargs) im.save(filename, fmt, quality=100) def get_rgb(source, bands=None, **kwargs): '''Extract RGB data for display from a SpyFile object or numpy array. USAGE: rgb = get_rgb(source [, bands] [, stretch= | , bounds=] [, stretch_all=]) Arguments: `source` (:class:`spectral.SpyFile` or :class:`numpy.ndarray`): Data source from which to extract the RGB data. 
`bands` (list of `int`) (optional): Optional triplet of indices which specifies the bands to extract for the red, green, and blue components, respectively. If this arg is not given, SpyFile object, it's metadata dict will be checked to see if it contains a "default bands" item. If it does not, then first, middle and last band will be returned. Keyword Arguments: `stretch` (numeric or tuple): This keyword specifies two points on the cumulative histogram of the input data for performing a linear stretch of RGB value for the data. Numeric values given for this parameter are expected to be between 0 and 1. This keyword can be expressed in three forms: 1. As a 2-tuple. In this case the two values specify the lower and upper points of the cumulative histogram respectively. The specified stretch will be performed independently on each of the three color channels unless the `stretch_all` keyword is set to True, in which case all three color channels will be stretched identically. 2. As a 3-tuple of 2-tuples. In this case, Each channel will be stretched according to its respective 2-tuple in the keyword argument. 3. As a single numeric value. In this case, the value indicates the size of the histogram tail to be applied at both ends of the histogram for each color channel. `stretch=a` is equivalent to `stretch=(a, 1-a)`. If neither `stretch` nor `bounds` are specified, then the default value of `stretch` defined by `spectral.settings.imshow_stretch` will be used. `bounds` (tuple): This keyword functions similarly to the `stretch` keyword, except numeric values are in image data units instead of cumulative histogram values. The form of this keyword is the same as the first two forms for the `stretch` keyword (i.e., either a 2-tuple of numbers or a 3-tuple of 2-tuples of numbers). `stretch_all` (bool): If this keyword is True, each color channel will be scaled independently. `color_scale` (:class:`~spectral.graphics.colorscale.ColorScale`): A color scale to be applied to a single-band image. `auto_scale` (bool): If `color_scale` is provided and `auto_scale` is True, the min/max values of the color scale will be mapped to the min/max data values. `colors` (ndarray): If `source` is a single-band integer-valued np.ndarray and this keyword is provided, then elements of `source` are assumed to be color index values that specify RGB values in `colors`. Examples: Select color limits corresponding to 2% tails in the data histogram: >>> imshow(x, stretch=0.02) Same as above but specify upper and lower limits explicitly: >>> imshow(x, stretch=(0.02, 0.98)) Same as above but specify limits for each RGB channel explicitly: >>> imshow(x, stretch=((0.02, 0.98), (0.02, 0.98), (0.02, 0.98))) ''' return get_rgb_meta(source, bands, **kwargs)[0] def _fill_mask(arr, mask, fill_value): if mask is None: return arr arr[mask == 0] = np.array(fill_value) / 255. return arr def get_rgb_meta(source, bands=None, **kwargs): '''Same as get_rgb but also returns some metadata. Inputs are the same as for get_rgb but the return value is a 2-tuple whose first element is the get_rgb return array and whose second element is a dictionary containing some metadata values for the data RGB conversion. 
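    A short usage sketch (variable names here are only illustrative); the
    returned dict holds items such as 'bands', 'mode', and 'rgb range':

        >>> rgb, meta = get_rgb_meta(img, (29, 19, 9), stretch=0.02)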
    '''
    for k in kwargs:
        if k not in _get_rgb_kwargs:
            raise ValueError('Invalid keyword: {0}'.format(k))
    if bands is None:
        bands = []
    if len(bands) not in (0, 1, 3):
        raise Exception("Invalid number of bands specified.")
    meta = {}
    monochrome = False
    mask = kwargs.get('mask', None)
    bg = kwargs.get('bg', settings.imshow_background_color)

    if isinstance(source, Image) and len(source.shape) == 3:
        # Figure out which bands to display
        s = source.shape
        if len(bands) == 0:
            # No bands specified. What should we show?
            if hasattr(source, 'metadata') and \
                    'default bands' in source.metadata:
                try:
                    bands = [int(b) for b in source.metadata['default bands']]
                except:
                    msg = 'Unable to interpret "default bands" in image ' \
                          'metadata. Defaulting to first, middle, & last band.'
                    warnings.warn(msg)
            elif source.shape[-1] == 1:
                bands = [0]
        if len(bands) == 0:
            # Pick the first, middle, and last bands
            n = source.shape[-1]
            bands = [0, n // 2, n - 1]
        rgb = source.read_bands(bands).astype(float)
        meta['bands'] = bands
    else:
        # It should be a numpy array
        if source.ndim == 2:
            source = source[:, :, np.newaxis]
        s = source.shape
        if s[2] == 1:
            if len(bands) == 0:
                bands = [0]
            elif np.max(bands) > 0:
                raise ValueError('Invalid band index for monochrome image.')
        if s[2] == 3 and len(bands) == 0:
            # Keep data as is.
            bands = [0, 1, 2]
        elif s[2] > 3 and len(bands) == 0:
            # More than 3 bands in data but no bands specified so take
            # first, middle, & last bands (integer division keeps the
            # middle band index an int).
            bands = [0, s[2] // 2, s[2] - 1]
        rgb = np.take(source, bands, 2).astype(float)
        if rgb.ndim == 2:
            rgb = rgb[:, :, np.newaxis]
        meta['bands'] = bands

    color_scale = kwargs.get('color_scale', None)
    auto_scale = kwargs.get('auto_scale', False)

    # If it's either color-indexed or monochrome
    if rgb.shape[2] == 1:
        s = rgb.shape
        if "colors" in kwargs:
            # color-indexed image
            meta['mode'] = 'indexed'
            rgb = rgb.astype(int)
            pal = kwargs["colors"]
            rgb = pal[rgb[:,:,0]] / 255.
            return (_fill_mask(rgb, mask, bg), meta)
        elif color_scale is not None:
            # Colors should be generated from the supplied color scale
            # This section assumes rgb colors in the range 0-255.
            meta['mode'] = 'scaled'
            scale = color_scale
            if auto_scale:
                scale.set_range(min(rgb.ravel()), max(rgb.ravel()))
            rgb3 = np.zeros((s[0], s[1], 3), int)
            rgb3 = np.apply_along_axis(scale, 2, rgb)
            rgb = rgb3.astype(float) / 255.
            return (_fill_mask(rgb, mask, bg), meta)
        else:
            # Only one band of data to display but still need to determine
            # how to scale the data values
            meta['mode'] = 'monochrome'
            monochrome = True
            rgb = np.repeat(rgb, 3, 2).astype(float)

    # Perform any requested color enhancements.
    stretch = kwargs.get('stretch', settings.imshow_stretch)
    stretch_all = kwargs.get('stretch_all', settings.imshow_stretch_all)
    bounds = kwargs.get('bounds', None)

    if bounds is not None:
        # Data limits for the color stretch are set explicitly
        bounds = np.array(bounds)
        if bounds.shape not in ((2,), (3, 2)):
            msg = '`bounds` keyword must have shape (2,) or (3, 2).'
            raise ValueError(msg)
        if bounds.ndim == 1:
            bounds = np.vstack((bounds,) * 3)
        rgb_lims = bounds
    else:
        # Determine data limits for color stretch from given cumulative
        # histogram values.
        if stretch in (True, False):
            msg = 'Boolean values for `stretch` keyword are deprecated. 
See ' \ 'docstring for `get_rgb`' warnings.warn(msg) stretch = settings.imshow_stretch elif isinstance(stretch, Number): if not (0 <= stretch <= 1): raise ValueError('Value must be between 0 and 1.') stretch = (stretch, 1 - stretch) stretch = np.array(stretch) if stretch.shape not in ((2,), (3, 2)): raise ValueError("`stretch` keyword must be numeric or a " \ "sequence with shape (2,) or (3, 2).") nondata = kwargs.get('ignore', None) if stretch.ndim == 1: if monochrome: s = get_histogram_cdf_points(rgb[:, :, 0], stretch, ignore=nondata) rgb_lims = [s, s, s] elif stretch_all: # Stretch each color component independently rgb_lims = [get_histogram_cdf_points(rgb[:, :, i], stretch, ignore=nondata) \ for i in range(3)] else: # Use a common lower/upper limit for each band by taking # the lowest lower limit and greatest upper limit. lims = np.array([get_histogram_cdf_points(rgb[:,:,i], stretch, ignore=nondata) \ for i in range(3)]) minmax = np.array([lims[:,0].min(), lims[:,1].max()]) rgb_lims = minmax[np.newaxis, :].repeat(3, axis=0) else: if monochrome: # Not sure why anyone would want separate RGB stretches for # a gray-scale image but we'll let them. rgb_lims = [get_histogram_cdf_points(rgb[:,:,0], stretch[i], ignore=nondata) \ for i in range(3)] elif stretch_all: rgb_lims = [get_histogram_cdf_points(rgb[:,:,i], stretch[i], ignore=nondata) \ for i in range(3)] else: msg = 'Can not use common stretch if different stretch ' \ ' parameters are given for each color channel.' raise ValueError(msg) if 'mode' not in meta: meta['mode'] = 'rgb' meta['rgb range'] = rgb_lims for i in range(rgb.shape[2]): (lower, upper) = rgb_lims[i] span = upper - lower if lower == upper: rgb[:, :, i] = 0 else: rgb[:, :, i] = np.clip((rgb[:, :, i] - lower) / span, 0, 1) return (_fill_mask(rgb, mask, bg), meta) # For checking if valid keywords were supplied _get_rgb_kwargs = ('stretch', 'stretch_all', 'bounds', 'colors', 'color_scale', 'auto_scale', 'ignore', 'mask', 'bg') def running_ipython(): '''Returns True if ipython is running.''' try: __IPYTHON__ return True except NameError: return False def warn_no_ipython(): '''Warns that user is calling a GUI function outside of ipython.''' msg = ''' ############################################################################# SPy graphics functions are inteded to be run from IPython with the `pylab` mode set for wxWindows. For example, # ipython --pylab=WX GUI functions will likely not function properly if you aren't running IPython or haven't started it configured for pylab and wx. ############################################################################# ''' if sys.platform == 'darwin': msg += ''' NOTE: If you are running on Mac OS X and receive an error message stating the following: "PyNoAppError: The wx.App object must be created first!", You can avoid this error by running the following commandes immediately after starting your ipython session: In [1]: import wx In [2]: app = wx.App() ############################################################################# ''' warnings.warn(msg, UserWarning) def check_wx_app(): '''Generates a warning if there is not a running wx.App. If spectral.START_WX_APP is True and there is no current app, then on will be started. 
''' import spectral import wx if wx.GetApp() is None and spectral.settings.START_WX_APP == True: warnings.warn('\nThere is no current wx.App object - creating one now.', UserWarning) spectral.app = wx.App() spectral-0.22.4/spectral/graphics/hypercube.py000066400000000000000000000435021412674721200214030ustar00rootroot00000000000000''' Code for rendering and manipulating hypercubes. Most users will only need to call the function "hypercube". ''' # The OpenGL code in this file was adapted from a number of OpenGL demo # scripts that were created, ported, and adapted by various authors # including Richard Campbell, John Ferguson, Tony Colston, Tarn Weisner, # Yan Wong, Greg Landrum, and possibly others. # # Source file comments from some of the original files are as follows: # # #------------------------------------------------------------------------ # Ported to PyOpenGL 2.0 by Tarn Weisner Burton 10May2001 # # This code was created by Richard Campbell '99 (ported to Python/PyOpenGL by # # John Ferguson 2000) The port was based on the lesson5 tutorial module by Tony # # Colston (tonetheman@hotmail.com). If you've found this code useful, please # # let me know (email John Ferguson at hakuin@voicenet.com). # # See original source and C based tutorial at http:#nehe.gamedev.net #------------------------------------------------------------------------ # This file found at: # http://lists.wxwidgets.org/archive/wxPython-users/msg11078.html # # This includes the two classes wxGLWindow and wxAdvancedGLWindow # from OpenGL.TK in the PyOpenGL distribution # ported to wxPython by greg Landrum # modified by Y. Wong #------------------------------------------------------------------------ from __future__ import absolute_import, division, print_function, unicode_literals import math import numpy as np try: import wx from wx import glcanvas except ImportError: raise ImportError("Required dependency wx.glcanvas not present") from .. import settings from ..image import Image from ..io.spyfile import SpyFile from .colorscale import create_default_color_scale from .graphics import make_pil_image, SpyWindow DEFAULT_WIN_SIZE = (500, 500) # Default dimensions of image frame DEFAULT_TEXTURE_SIZE = ( 256, 256) # Default size of textures on cube faces def rtp_to_xyz(r, theta, phi): '''Convert spherical polar coordinates to Cartesian''' theta *= math.pi / 180.0 phi *= math.pi / 180.0 s = r * math.sin(theta) return [s * math.cos(phi), s * math.sin(phi), r * math.cos(theta)] def xyz_to_rtp(x, y, z): '''Convert Cartesian coordinates to Spherical Polar.''' r = math.sqrt(x * x + y * y + z * z) rho = math.sqrt(x * x + y * y) phi = math.asin(y / rho) * 180. / math.pi if x < 0.0: phi += 180 theta = math.acos(z / r) * 180. 
/ math.pi return [r, theta, phi] (DOWN, UP) = (1, 0) class MouseHandler: '''A class to enable rotate/zoom functions in an OpenGL window.''' MAX_BUTTONS = 10 def __init__(self, window): self.window = window self.position = None self.event_position = None self.left = UP self.right = UP self.middle = UP def left_down(self, event): self.event_position = (event.X, event.Y) self.position = (event.X, event.Y) self.left = DOWN event.Skip() def left_up(self, event): self.position = (event.X, event.Y) self.left = UP event.Skip() def motion(self, event): '''Handles panning & zooming for mouse click+drag events.''' if DOWN not in (self.left, self.right): return #print 'Mouse movement:', x, y (w, h) = self.window.size dx = event.X - self.position[0] dy = event.Y - self.position[1] if self.left == DOWN: if wx.GetKeyState(wx.WXK_CONTROL): # Mouse movement zooms in/out relative to target position if dx != 0.0: self.window.camera_pos_rtp[0] *= (float(w - dx) / w) elif wx.GetKeyState(wx.WXK_SHIFT): # Mouse movement pans target position in plane of the window camera_pos = np.array(rtp_to_xyz(*self.window.camera_pos_rtp)) view_vec = -np.array(rtp_to_xyz(*self.window.camera_pos_rtp)) zhat = np.array([0.0, 0.0, 1.0]) right = -np.cross(zhat, view_vec) right /= np.sum(np.square(right)) up = np.cross(right, view_vec) up /= np.sum(np.square(up)) dr = right * (4.0 * dx / w) du = up * (4.0 * dy / h) self.window.target_pos += du - dr else: # Mouse movement creates a rotation about the target position xangle = 2.0 * self.window.fovy * float(dx) / h yangle = 2.0 * self.window.fovy * float(dy) / h rtp = self.window.camera_pos_rtp rtp[1] = min(max(rtp[1] - yangle, 0.05), 179.95) self.window.camera_pos_rtp[2] -= xangle self.position = (event.X, event.Y) self.window.Refresh() event.Skip() class HypercubeWindow(wx.Frame, SpyWindow): """A simple class for using OpenGL with wxPython.""" def __init__(self, data, parent, id, *args, **kwargs): global DEFAULT_WIN_SIZE self.kwargs = kwargs self.size = kwargs.get('size', DEFAULT_WIN_SIZE) self.title = kwargs.get('title', 'Hypercube') # # Forcing a specific style on the window. # Should this include styles passed? style = wx.DEFAULT_FRAME_STYLE | wx.NO_FULL_REPAINT_ON_RESIZE wx.Frame.__init__(self, parent, id, self.title, wx.DefaultPosition, wx.Size(*self.size), style, kwargs.get('name', 'Hypercube')) self.gl_initialized = False attribs = (glcanvas.WX_GL_RGBA, # RGBA glcanvas.WX_GL_DOUBLEBUFFER, # Double Buffered glcanvas.WX_GL_DEPTH_SIZE, settings.WX_GL_DEPTH_SIZE) self.canvas = glcanvas.GLCanvas( self, attribList=attribs, size=self.size) self.canvas.context = wx.glcanvas.GLContext(self.canvas) # These members can be modified before calling the show method. self.clear_color = tuple(kwargs.get('background', (0., 0., 0.))) \ + (1.,) self.win_pos = (100, 100) self.fovy = 60. self.znear = 0.1 self.zfar = 10.0 self.target_pos = [0.0, 0.0, 0.0] self.camera_pos_rtp = [7.0, 45.0, 30.0] self.up = [0.0, 0.0, 1.0] self.hsi = data self.cubeHeight = 1.0 self.rotation = [-60, 0, -30] self.distance = -5 self.light = False self.texturesLoaded = False self.mouse_handler = MouseHandler(self) # Set the event handlers. 
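        # Mouse events are routed to the MouseHandler created above
        # (rotate, zoom, and pan), while keyboard input goes to on_char
        # (see print_help for the available keybindings).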
self.canvas.Bind(wx.EVT_ERASE_BACKGROUND, self.on_erase_background) self.canvas.Bind(wx.EVT_SIZE, self.on_resize) self.canvas.Bind(wx.EVT_PAINT, self.on_paint) self.canvas.Bind(wx.EVT_LEFT_DOWN, self.mouse_handler.left_down) self.canvas.Bind(wx.EVT_LEFT_UP, self.mouse_handler.left_up) self.canvas.Bind(wx.EVT_MOTION, self.mouse_handler.motion) self.canvas.Bind(wx.EVT_CHAR, self.on_char) def load_textures(self): import OpenGL.GL as gl global DEFAULT_TEXTURE_SIZE if 'scale' in self.kwargs: scale = self.kwargs['scale'] else: scale = create_default_color_scale(256) data = self.hsi s = data.shape # Create image for top of cube if 'top' in self.kwargs: image = self.kwargs['top'] if isinstance(image, np.ndarray): image = make_pil_image(image) else: if 'bands' in self.kwargs: bands = self.kwargs['bands'] elif isinstance(data, SpyFile) and \ 'default bands' in data.metadata: bands = list(map(int, data.metadata['default bands'])) else: bands = list(range(3)) image = make_pil_image(data, bands) # Read each image so it displays properly when viewed from the outside # of the cube with corners rendered from lower left CCW to upper left. # Read data for sides of cube sides = [np.fliplr(np.rot90(data[s[0] - 1, :, :].squeeze(), 3))] # front face sides.append(np.rot90(data[:, s[1] - 1, :].squeeze(), 3)) # right face sides.append(np.rot90(data[0, :, :].squeeze(), 3)) # back face sides.append(np.fliplr(np.rot90(data[:, 0, :].squeeze(), 3))) # left face # Create images for sides of cube scaleMin = min([min(side.ravel()) for side in sides]) scaleMax = max([max(side.ravel()) for side in sides]) scale.set_range(scaleMin, scaleMax) sideImages = [make_pil_image(side, color_scale=scale, auto_scale=0) for side in sides] images = [image] + sideImages self.textures = gl.glGenTextures(6) texImages = [] (a, b, c) = data.shape texSizes = [(b, a), (b, c), (a, c), (b, c), (a, c), (b, a)] for i in range(len(images)): try: # API change for Pillow img = images[i].tobytes("raw", "RGBX", 0, -1) except: # Fall back to old PIL API img = images[i].tostring("raw", "RGBX", 0, -1) (dim_x, dim_y) = images[i].size texImages.append(img) # Create Linear Filtered Texture gl.glBindTexture(gl.GL_TEXTURE_2D, int(self.textures[i])) gl.glTexParameteri( gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, gl.GL_LINEAR) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, gl.GL_LINEAR) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_R, gl.GL_CLAMP) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP) gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, 3, dim_x, dim_y, 0, gl.GL_RGBA, gl.GL_UNSIGNED_BYTE, texImages[i]) def GetGLExtents(self): """Get the extents of the OpenGL canvas.""" return def SwapBuffers(self): """Swap the OpenGL buffers.""" self.canvas.SwapBuffers() def on_erase_background(self, event): """Process the erase background event.""" pass # Do nothing, to avoid flashing on MSWin def initgl(self): """Initialize OpenGL for use in the window.""" import OpenGL.GL as gl import OpenGL.GLU as glu self.load_textures() gl.glEnable(gl.GL_TEXTURE_2D) gl.glClearColor(*self.clear_color) gl.glClearDepth(1.0) gl.glDepthFunc(gl.GL_LESS) gl.glEnable(gl.GL_DEPTH_TEST) gl.glShadeModel(gl.GL_SMOOTH) gl.glMatrixMode(gl.GL_PROJECTION) # Reset The projection matrix gl.glLoadIdentity() # Calculate aspect ratio of the window (width, height) = self.canvas.GetClientSize() glu.gluPerspective(45.0, float(width) / float(height), 0.1, 100.0) gl.glMatrixMode(gl.GL_MODELVIEW) 
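        # Define a single light source. Lighting is only applied when the
        # user toggles it with the 'l' key (see on_char and on_paint).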
gl.glLightfv(gl.GL_LIGHT0, gl.GL_AMBIENT, (0.5, 0.5, 0.5, 1.0)) gl.glLightfv(gl.GL_LIGHT0, gl.GL_DIFFUSE, (1.0, 1.0, 1.0, 1.0)) gl.glLightfv(gl.GL_LIGHT0, gl.GL_POSITION, (0.0, 0.0, 2.0, 1.0)) gl.glEnable(gl.GL_LIGHT0) def on_paint(self, event): """Process the drawing event.""" import OpenGL.GL as gl import OpenGL.GLU as glu self.canvas.SetCurrent(self.canvas.context) if not self.gl_initialized: self.initgl() self.gl_initialized = True self.print_help() if self.light: gl.glEnable(gl.GL_LIGHTING) else: gl.glDisable(gl.GL_LIGHTING) gl.glClear(gl.GL_COLOR_BUFFER_BIT | gl.GL_DEPTH_BUFFER_BIT) gl.glLoadIdentity() gl.glPushMatrix() glu.gluLookAt(*(list(rtp_to_xyz( *self.camera_pos_rtp)) + list(self.target_pos) + list(self.up))) self.draw_cube() gl.glPopMatrix() gl.glFlush() self.SwapBuffers() event.Skip() def draw_cube(self, *args, **kwargs): import OpenGL.GL as gl # Determine cube proportions divisor = max(self.hsi.shape[:2]) hw, hh = [float(x) / divisor for x in self.hsi.shape[:2]] hz = self.cubeHeight # Top Face (note that the texture's corners have to match the quad's) gl.glBindTexture(gl.GL_TEXTURE_2D, int(self.textures[0])) gl.glBegin(gl.GL_QUADS) gl.glTexCoord2f(0.0, 0.0) gl.glVertex3f(hw, -hh, hz) # Bottom Left Of The Texture and Quad gl.glTexCoord2f(1.0, 0.0) gl.glVertex3f(hw, hh, hz) # Bottom Right Of The Texture and Quad gl.glTexCoord2f(1.0, 1.0) gl.glVertex3f( -hw, hh, hz) # Top Right Of The Texture and Quad gl.glTexCoord2f(0.0, 1.0) gl.glVertex3f( -hw, -hh, hz) # Top Left Of The Texture and Quad gl.glEnd() # Far Face gl.glBindTexture(gl.GL_TEXTURE_2D, int(self.textures[3])) gl.glBegin(gl.GL_QUADS) gl.glTexCoord2f(0.0, 0.0) gl.glVertex3f( -hw, hh, -hz) # Top Left Of The Texture and Quad gl.glTexCoord2f(1.0, 0.0) gl.glVertex3f( -hw, -hh, -hz) # Bottom Left Of The Texture and Quad gl.glTexCoord2f(1.0, 1.0) gl.glVertex3f( -hw, -hh, hz) # Bottom Right Of The Texture and Quad gl.glTexCoord2f(0.0, 1.0) gl.glVertex3f( -hw, hh, hz) # Top Right Of The Texture and Quad gl.glEnd() # Near Face gl.glBindTexture(gl.GL_TEXTURE_2D, int(self.textures[1])) gl.glBegin(gl.GL_QUADS) gl.glTexCoord2f(0.0, 0.0) gl.glVertex3f( hw, -hh, -hz) # Top Right Of The Texture and Quad gl.glTexCoord2f(1.0, 0.0) gl.glVertex3f( hw, hh, -hz) # Top Left Of The Texture and Quad gl.glTexCoord2f(1.0, 1.0) gl.glVertex3f( hw, hh, hz) # Bottom Left Of The Texture and Quad gl.glTexCoord2f(0.0, 1.0) gl.glVertex3f( hw, -hh, hz) # Bottom Right Of The Texture and Quad gl.glEnd() # Right face gl.glBindTexture(gl.GL_TEXTURE_2D, int(self.textures[2])) gl.glBegin(gl.GL_QUADS) gl.glTexCoord2f(0.0, 0.0) gl.glVertex3f( hw, hh, -hz) # Bottom Right Of The Texture and Quad gl.glTexCoord2f(1.0, 0.0) gl.glVertex3f( -hw, hh, -hz) # Top Right Of The Texture and Quad gl.glTexCoord2f(1.0, 1.0) gl.glVertex3f( -hw, hh, hz) # Top Left Of The Texture and Quad gl.glTexCoord2f(0.0, 1.0) gl.glVertex3f( hw, hh, hz) # Bottom Left Of The Texture and Quad gl.glEnd() # Left Face gl.glBindTexture(gl.GL_TEXTURE_2D, int(self.textures[4])) gl.glBegin(gl.GL_QUADS) gl.glTexCoord2f(0.0, 0.0) gl.glVertex3f( -hw, -hh, -hz) # Bottom Left Of The Texture and Quad gl.glTexCoord2f(1.0, 0.0) gl.glVertex3f( hw, -hh, -hz) # Bottom Right Of The Texture and Quad gl.glTexCoord2f(1.0, 1.0) gl.glVertex3f( hw, -hh, hz) # Top Right Of The Texture and Quad gl.glTexCoord2f(0.0, 1.0) gl.glVertex3f( -hw, -hh, hz) # Top Left Of The Texture and Quad gl.glEnd() # Bottom Face gl.glBindTexture(gl.GL_TEXTURE_2D, int(self.textures[0])) gl.glBegin(gl.GL_QUADS) gl.glTexCoord2f(0.0, 0.0) 
gl.glVertex3f( hw, -hh, -hz) # Bottom Left Of The Texture and Quad gl.glTexCoord2f(1.0, 0.0) gl.glVertex3f( hw, hh, -hz) # Bottom Right Of The Texture and Quad gl.glTexCoord2f(1.0, 1.0) gl.glVertex3f( -hw, hh, -hz) # Top Right Of The Texture and Quad gl.glTexCoord2f(0.0, 1.0) gl.glVertex3f( -hw, -hh, -hz) # Top Left Of The Texture and Quad gl.glEnd() def on_resize(self, event): """Process the resize event.""" if wx.VERSION >= (2, 9) or self.canvas.GetContext(): self.canvas.SetCurrent(self.canvas.context) self.Show() size = self.canvas.GetClientSize() self.resize(size.width, size.height) self.canvas.Refresh(False) event.Skip() def resize(self, width, height): """Reshape the OpenGL viewport based on dimensions of the window.""" import OpenGL.GL as gl import OpenGL.GLU as glu self.size = (width, height) gl.glViewport(0, 0, width, height) gl.glMatrixMode(gl.GL_PROJECTION) gl.glLoadIdentity() glu.gluPerspective(self.fovy, float(width) / height, self.znear, self.zfar) gl.glMatrixMode(gl.GL_MODELVIEW) gl.glLoadIdentity() def on_char(self, event): key = event.GetKeyCode() if key == ord('t'): self.cubeHeight += 0.1 elif key == ord('g'): self.cubeHeight -= 0.1 elif key == ord('l'): self.light = not self.light elif key == ord('h'): self.print_help() # self.on_draw() self.on_paint(event) if key == ord('q'): self.Destroy() def print_help(self): print() print('Mouse Functions:') print('----------------') print('left-click & drag -> Rotate cube') print('CTRL+left-click & drag -> Zoom in/out') print('SHIFT+left-click & drag -> Pan') print() print('Keybinds:') print('---------') print('l -> toggle light') print('t/g -> stretch/compress z-dimension') print('h -> print help message') print('q -> close window') print() spectral-0.22.4/spectral/graphics/ndwindow.py000066400000000000000000001236321412674721200212510ustar00rootroot00000000000000''' Code to display N-dimensional data sets in 3D using OpenGL. ''' from __future__ import absolute_import, division, print_function, unicode_literals import math import numpy as np import os from pprint import pprint import random import time try: import wx from wx import glcanvas except ImportError: raise ImportError("Required dependency wx.glcanvas not present") from .. import settings from ..config import spy_colors from .colorscale import ColorScale from .spypylab import ImageView, MplCallback, SpyMplEvent from .graphics import WindowProxy DEFAULT_WIN_SIZE = (500, 500) # Default dimensions of image frame def rtp_to_xyz(r, theta, phi): '''Convert spherical polar coordinates to Cartesian''' theta *= math.pi / 180.0 phi *= math.pi / 180.0 s = r * math.sin(theta) return [s * math.cos(phi), s * math.sin(phi), r * math.cos(theta)] def xyz_to_rtp(x, y, z): '''Convert Cartesian coordinates to Spherical Polar.''' r = math.sqrt(x * x + y * y + z * z) rho = math.sqrt(x * x + y * y) phi = math.asin(y / rho) * 180. / math.pi if x < 0.0: phi += 180 theta = math.acos(z / r) * 180. / math.pi return [r, theta, phi] (DOWN, UP) = (1, 0) class MouseHandler: '''A class to enable rotate/zoom functions in an OpenGL window.''' MAX_BUTTONS = 10 def __init__(self, window): self.window = window self.position = None self.event_position = None self.left = UP self.right = UP self.middle = UP self.mode = 'DEFAULT' def left_down(self, event): self.position = (event.X, event.Y) self.left = DOWN if self.mode == 'DEFAULT': if wx.GetKeyState(wx.WXK_CONTROL) and wx.GetKeyState(wx.WXK_SHIFT): # Display the row/col and class of the selected pixel. 
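                # The y coordinate is flipped below because wx reports
                # mouse positions from the top-left corner of the window,
                # while the raster position used by get_pixel_info is
                # measured from the bottom-left.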
(x, y) = self.position cmd = lambda: self.window.get_pixel_info( x, self.window.size[1] - y) self.window.add_display_command(cmd) self.window.canvas.SetCurrent(self.window.canvas.context) self.window.canvas.Refresh() elif wx.GetKeyState(wx.WXK_SHIFT): # Switch to box selection mode. print('IN BOX SELECTION MODE.') self.mode = 'BOX_SELECT' elif wx.GetKeyState(wx.WXK_CONTROL): # Switch to zoom mode. self.mode = 'ZOOMING' self.event_position = (event.X, event.Y) event.Skip() def left_up(self, event): self.position = (event.X, event.Y) self.left = UP if self.mode == 'BOX_SELECT': self.update_box_coordinates() # Box selection ends when the button is released. if wx.GetKeyState(wx.WXK_SHIFT): print('BOX HAS BEEN SELECTED.') self.mode = 'DEFAULT' else: # Shift key was released before box selection completed. print('BOX SELECTION CANCELLED.') self.window._selection_box = None self.window.canvas.SetCurrent(self.window.canvas.context) self.window.canvas.Refresh() elif self.mode == 'ZOOMING': self.mode = 'DEFAULT' self.event_position = (event.X, event.Y) event.Skip() def motion(self, event): '''Handles panning & zooming for mouse click+drag events.''' if DOWN not in (self.left, self.right): return #print 'Mouse movement:', x, y (w, h) = self.window.size dx = event.X - self.position[0] dy = event.Y - self.position[1] if self.mode == 'DEFAULT': if self.left == DOWN and not self.window.mouse_panning: # Mouse movement creates a rotation about the target position xangle = 2.0 * self.window.fovy * float(dx) / h yangle = 2.0 * self.window.fovy * float(dy) / h rtp = self.window.camera_pos_rtp rtp[1] = min(max(rtp[1] - yangle, 0.05), 179.95) self.window.camera_pos_rtp[2] -= xangle elif self.left == DOWN: # Mouse movement pans target position in the plane of window camera_pos = np.array(rtp_to_xyz(*self.window.camera_pos_rtp)) view_vec = -np.array(rtp_to_xyz(*self.window.camera_pos_rtp)) zhat = np.array([0.0, 0.0, 1.0]) right = -np.cross(zhat, view_vec) right /= np.sum(np.square(right)) up = np.cross(right, view_vec) up /= np.sum(np.square(up)) dr = right * (4.0 * dx / w) du = up * (4.0 * dy / h) self.window.target_pos += du - dr elif self.mode == 'ZOOMING': # Mouse movement zooms in/out relative to target position if dx != 0.0: self.window.camera_pos_rtp[0] *= (float(w - dx) / w) elif self.mode == 'BOX_SELECT': self.update_box_coordinates() self.position = (event.X, event.Y) self.window.Refresh() event.Skip() def update_box_coordinates(self): xmin = min(self.event_position[0], self.position[0]) xmax = max(self.event_position[0], self.position[0]) ymin = min(self.event_position[1], self.position[1]) ymax = max(self.event_position[1], self.position[1]) R = self.window.size[1] self.window._selection_box = (xmin, R - ymax, xmax, R - ymin) class MouseMenu(wx.Menu): '''Right-click menu for reassigning points to different classes.''' ids = [] def __init__(self, window): super(MouseMenu, self).__init__(title='Assign to class') self.window = window self.id_classes = {} while len(self.ids) < self.window.max_menu_class + 1: self.ids.append( wx.NewId()) for i in range(self.window.max_menu_class + 1): id = self.ids[i] self.id_classes[id] = i print('(id, i) =', (id, i)) mi = wx.MenuItem(self, id, str(i)) self.AppendItem(mi) self.Bind(wx.EVT_MENU, self.reassign_points, mi) def reassign_points(self, event): i = self.id_classes[event.GetId()] self.window.post_reassign_selection(i) # Multipliers for projecting data into each 3D octant octant_coeffs = np.array([ [1, 1, 1], [-1, 1, 1], [-1, -1, 1], [1, -1, 1], [1, 1, -1], [-1, 
1, -1], [-1, -1, -1], [1, -1, -1]], float) def create_mirrored_octants(feature_indices): '''Takes a list of 6 integers and returns 8 lists of feature index triplets. The 6 indices passed each specify a feature to be associatd with a semi-axis in the 3D display. Each of the 8 returned triplets specifies the 3 features associated with particular octant, starting with the positive x,y,z octant, proceding counterclockwise around the z-axis then similarly for the negative half of the z-axis. ''' f = feature_indices octants = [ [f[0], f[1], f[2]], [f[3], f[1], f[2]], [f[3], f[4], f[2]], [f[0], f[4], f[2]], [f[0], f[1], f[5]], [f[3], f[1], f[5]], [f[3], f[4], f[5]], [f[0], f[4], f[5]]] return octants def random_subset(sequence, nsamples): '''Returns a list of `nsamples` unique random elements from `sequence`.''' if len(sequence) < nsamples: raise Exception('Sequence in random_triplet must have at least ' + '3 elements.') triplet = [random.choice(sequence) for i in range(nsamples)] while len(set(triplet)) != nsamples: triplet = [random.choice(sequence) for i in range(nsamples)] return triplet class NDWindowProxy(WindowProxy): '''A proxy class to retrieve data from an NDWindow. An instance contains the following members: `classes` (ndarray): The current class labels associated with the NDWindow data. `set_features` ((list, string)): List of features and display mode (see set_features doc string.) ''' def __init__(self, window): WindowProxy.__init__(self, window) self._classes = window.classes @property def classes(self): '''Returns the current class labels associated with data points.''' return self._classes def set_features(self, *args, **kwargs): '''Specifies which features to display in the 3D window. Arguments: `features` (list or list of integer lists): This keyword specifies which bands/features from `data` should be displayed in the 3D window. It must be defined as one of the following: #. If `mode` is set to "single" (the default), then `features` must be a length-3 list of integer feature IDs. In this case, the data points will be displayed in the positive x,y,z octant using features associated with the 3 integers. #. If `mode` is set to "mirrored", then `features` must be a length-6 list of integer feature IDs. In this case, each integer specifies a single feature index to be associated with the coordinate semi-axes x, y, z, -x, -y, and -z (in that order). Each octant will display data points using the features associated with the 3 semi-axes for that octant. #. If `mode` is set to "independent", then `features` must be a length-8 list of length-3 lists of integers. In this case, each length-3 list specfies the features to be displayed in a single octants (the same semi-axis can be associated with different features in different octants). Octants are ordered starting with the postive x,y,z octant and procede counterclockwise around the z-axis, then procede similarly around the negative half of the z-axis. An octant triplet can be specified as None instead of a list, in which case nothing will be rendered in that octant. `mode` (string, default="single") The display mode for the 3D octants. This value must be "single", "mirrored", or "independent". ''' if not isinstance(self._window, wx.Frame): raise Exception('The window no longer exists.') self._window.set_features(*args, **kwargs) def view_class_image(self, *args, **kwargs): '''Show a dynamically updated view of image class values. The class IDs displayed are those currently associated with the ND window. 
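        Example (illustrative only; assumes `ndw` is the proxy object
        returned by :func:`view_nd`):

            >>> classview = ndw.view_class_image()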
`args` and `kwargs` are additional arguments passed on to the `ImageView` constructor. Return value is the ImageView object. ''' return self._window.view_class_image(*args, **kwargs) class NDWindow(wx.Frame): '''A widow class for displaying N-dimensional data points.''' def __init__(self, data, parent, id, *args, **kwargs): global DEFAULT_WIN_SIZE self.kwargs = kwargs self.size = kwargs.get('size', DEFAULT_WIN_SIZE) self.title = kwargs.get('title', 'ND Window') # # Forcing a specific style on the window. # Should this include styles passed? style = wx.DEFAULT_FRAME_STYLE | wx.NO_FULL_REPAINT_ON_RESIZE super(NDWindow, self).__init__(parent, id, self.title, wx.DefaultPosition, wx.Size(*self.size), style, self.title) self.gl_initialized = False attribs = (glcanvas.WX_GL_RGBA, glcanvas.WX_GL_DOUBLEBUFFER, glcanvas.WX_GL_DEPTH_SIZE, settings.WX_GL_DEPTH_SIZE) self.canvas = glcanvas.GLCanvas(self, attribList=attribs) self.canvas.context = wx.glcanvas.GLContext(self.canvas) self._have_glut = False self.clear_color = (0, 0, 0, 0) self.show_axes_tf = True self.point_size = 1.0 self._show_unassigned = True self._refresh_display_lists = False self._click_tolerance = 1 self._display_commands = [] self._selection_box = None self._rgba_indices = None self.mouse_panning = False self.win_pos = (100, 100) self.fovy = 60. self.znear = 0.1 self.zfar = 10.0 self.target_pos = [0.0, 0.0, 0.0] self.camera_pos_rtp = [7.0, 45.0, 30.0] self.up = [0.0, 0.0, 1.0] self.quadrant_mode = None self.mouse_handler = MouseHandler(self) # Set the event handlers. self.canvas.Bind(wx.EVT_ERASE_BACKGROUND, self.on_erase_background) self.Bind(wx.EVT_SIZE, self.on_resize) self.canvas.Bind(wx.EVT_PAINT, self.on_paint) self.canvas.Bind(wx.EVT_LEFT_DOWN, self.mouse_handler.left_down) self.canvas.Bind(wx.EVT_LEFT_UP, self.mouse_handler.left_up) self.canvas.Bind(wx.EVT_MOTION, self.mouse_handler.motion) self.canvas.Bind(wx.EVT_CHAR, self.on_char) self.canvas.Bind(wx.EVT_RIGHT_DOWN, self.right_click) self.canvas.Bind(wx.EVT_CLOSE, self.on_event_close) self.data = data self.classes = kwargs.get('classes', np.zeros(data.shape[:-1], int)) self.features = kwargs.get('features', list(range(6))) self.labels = kwargs.get('labels', list(range(data.shape[-1]))) self.max_menu_class = int(np.max(self.classes.ravel() + 1)) from matplotlib.cbook import CallbackRegistry self.callbacks = CallbackRegistry() def on_event_close(self, event=None): pass def right_click(self, event): self.canvas.SetCurrent(self.canvas.context) self.canvas.PopupMenu(MouseMenu(self), event.GetPosition()) def add_display_command(self, cmd): '''Adds a command to be called next time `display` is run.''' self._display_commands.append(cmd) def reset_view_geometry(self): '''Sets viewing geometry to the default view.''' # All grid points will be adjusted to the range [0,1] so this # is a reasonable center coordinate for the scene self.target_pos = np.array([0.0, 0.0, 0.0]) # Specify the camera location in spherical polar coordinates relative # to target_pos. self.camera_pos_rtp = [2.5, 45.0, 30.0] def set_data(self, data, **kwargs): '''Associates N-D point data with the window. ARGUMENTS: data (numpy.ndarray): An RxCxB array of data points to display. KEYWORD ARGUMENTS: classes (numpy.ndarray): An RxC array of integer class labels (zeros means unassigned). features (list): Indices of feautures to display in the octant (see NDWindow.set_octant_display_features for description). 
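        Example (an illustrative sketch; `ndwin`, `data`, and `gt` are
        hypothetical names for an NDWindow instance, an RxCxB data array,
        and an RxC array of class labels):

            >>> ndwin.set_data(data, classes=gt, features=[0, 1, 2, 3, 4, 5])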
        '''
        import OpenGL.GL as gl
        try:
            from OpenGL.GL import glGetIntegerv
        except:
            from OpenGL.GL.glget import glGetIntegerv
        classes = kwargs.get('classes', None)
        features = kwargs.get('features', list(range(6)))
        if self.data.shape[2] < 6:
            features = features[:3]
            self.quadrant_mode = 'single'

        # Scale the data set to span an octant
        data2d = np.array(data.reshape((-1, data.shape[-1])))
        mins = np.min(data2d, axis=0)
        maxes = np.max(data2d, axis=0)
        denom = (maxes - mins).astype(float)
        denom = np.where(denom > 0, denom, 1.0)
        self.data = (data2d - mins) / denom
        self.data.shape = data.shape

        self.palette = spy_colors.astype(float) / 255.
        self.palette[0] = np.array([1.0, 1.0, 1.0])
        self.colors = self.palette[self.classes.ravel()].reshape(
            self.data.shape[:2] + (3,))
        self.colors = (self.colors * 255).astype('uint8')
        colors = np.ones((self.colors.shape[:-1]) + (4,), 'uint8')
        colors[:, :, :-1] = self.colors
        self.colors = colors
        self._refresh_display_lists = True
        self.set_octant_display_features(features)

        # Determine the bit masks to use when using RGBA components for
        # identifying pixel IDs.
        components = [gl.GL_RED_BITS, gl.GL_GREEN_BITS, gl.GL_BLUE_BITS,
                      gl.GL_ALPHA_BITS]
        self._rgba_bits = [min(8, glGetIntegerv(i)) for i in components]
        self._low_bits = [min(8, 8 - self._rgba_bits[i]) for i in range(4)]
        self._rgba_masks = \
            [(2**self._rgba_bits[i] - 1) << (8 - self._rgba_bits[i])
             for i in range(4)]

        # Determine how many times the scene will need to be rendered in the
        # background to extract the pixel's row/col index.
        N = self.data.shape[0] * self.data.shape[1]
        if N > 2**sum(self._rgba_bits):
            raise Exception('Insufficient color bits (%d) for N-D window display' % sum(self._rgba_bits))
        self.reset_view_geometry()

    def set_octant_display_features(self, features):
        '''Specifies features to be displayed in each 3-D coordinate octant.

        `features` can be any of the following:

            A length-3 list of integer feature IDs:

                In this case, the data points will be displayed in the
                positive x,y,z octant using features associated with the 3
                integers.

            A length-6 list of integer feature IDs:

                In this case, each integer specifies a single feature index
                to be associated with the coordinate semi-axes x, y, z, -x,
                -y, and -z (in that order). Each octant will display data
                points using the features associated with the 3 semi-axes
                for that octant.

            A length-8 list of length-3 lists of integers:

                In this case, each length-3 list specifies the features to be
                displayed in a single octant (the same semi-axis can be
                associated with different features in different octants).
                Octants are ordered starting with the positive x,y,z octant
                and proceed counterclockwise around the z-axis, then proceed
                similarly around the negative half of the z-axis. An octant
                triplet can be specified as None instead of a list, in which
                case nothing will be rendered in that octant.
        '''
        if features is None:
            features = list(range(6))
        if len(features) == 3:
            self.octant_features = [features] + [None] * 7
            new_quadrant_mode = 'single'
            self.target_pos = np.array([0.5, 0.5, 0.5])
        elif len(features) == 6:
            self.octant_features = create_mirrored_octants(features)
            new_quadrant_mode = 'mirrored'
            self.target_pos = np.array([0.0, 0.0, 0.0])
        else:
            self.octant_features = features
            new_quadrant_mode = 'independent'
            self.target_pos = np.array([0.0, 0.0, 0.0])
        if new_quadrant_mode != self.quadrant_mode:
            print('Setting quadrant display mode to %s.'
% new_quadrant_mode) self.quadrant_mode = new_quadrant_mode self._refresh_display_lists = True def create_display_lists(self, npass=-1, **kwargs): '''Creates or updates the display lists for image data. ARGUMENTS: `npass` (int): When defaulted to -1, the normal image data display lists are created. When >=0, `npass` represents the rendering pass for identifying image pixels in the scene by their unique colors. KEYWORD ARGS: `indices` (list of ints): An optional list of N-D image pixels to display. ''' import OpenGL.GL as gl gl.glEnableClientState(gl.GL_COLOR_ARRAY) gl.glEnableClientState(gl.GL_VERTEX_ARRAY) gl.glPointSize(self.point_size) gl.glColorPointerub(self.colors) (R, C, B) = self.data.shape indices = kwargs.get('indices', None) if indices is None: indices = np.arange(R * C) if not self._show_unassigned: indices = indices[self.classes.ravel() != 0] self._display_indices = indices # RGB pixel indices for selecting pixels with the mouse gl.glPointSize(self.point_size) if npass < 0: # Colors are associated with image pixel classes. gl.glColorPointerub(self.colors) else: if self._rgba_indices is None: # Generate unique colors that correspond to each pixel's ID # so that the color can be used to identify the pixel. color_indices = np.arange(R * C) rgba = np.zeros((len(color_indices), 4), 'uint8') for i in range(4): shift = sum(self._rgba_bits[0:i]) - self._low_bits[i] if shift > 0: rgba[:, i] = ( color_indices >> shift) & self._rgba_masks[i] else: rgba[:, i] = (color_indices << self._low_bits[i]) \ & self._rgba_masks[i] self._rgba_indices = rgba gl.glColorPointerub(self._rgba_indices) # Generate a display list for each octant of the 3-D window. for (i, octant) in enumerate(self.octant_features): if octant is not None: data = np.take(self.data, octant, axis=2).reshape((-1, 3)) data *= octant_coeffs[i] gl.glVertexPointerf(data) gl.glNewList(self.gllist_id + i + 1, gl.GL_COMPILE) gl.glDrawElementsui(gl.GL_POINTS, indices) gl.glEndList() else: # Create an empty draw list gl.glNewList(self.gllist_id + i + 1, gl.GL_COMPILE) gl.glEndList() self.create_axes_list() self._refresh_display_lists = False def randomize_features(self): '''Randomizes data features displayed using current display mode.''' ids = list(range(self.data.shape[2])) if self.quadrant_mode == 'single': features = random_subset(ids, 3) elif self.quadrant_mode == 'mirrored': features = random_subset(ids, 6) else: features = [random_subset(ids, 3) for i in range(8)] print('New feature IDs:') pprint(np.array(features)) self.set_octant_display_features(features) def set_features(self, features, mode='single'): if mode == 'single': if len(features) != 3: raise Exception( 'Expected 3 feature indices for "single" mode.') elif mode == 'mirrored': if len(features) != 6: raise Exception( 'Expected 6 feature indices for "mirrored" mode.') elif mode == 'independent': if len(features) != 8: raise Exception('Expected 8 3-tuples of feature indices for' '"independent" mode.') else: raise Exception('Unrecognized feature mode: %s.' % str(mode)) print('New feature IDs:') pprint(np.array(features)) self.set_octant_display_features(features) self.Refresh() def draw_box(self, x0, y0, x1, y1): '''Draws a selection box in the 3-D window. Coordinates are with respect to the lower left corner of the window. 
''' import OpenGL.GL as gl gl.glMatrixMode(gl.GL_PROJECTION) gl.glLoadIdentity() gl.glOrtho(0.0, self.size[0], 0.0, self.size[1], -0.01, 10.0) gl.glLineStipple(1, 0xF00F) gl.glEnable(gl.GL_LINE_STIPPLE) gl.glLineWidth(1.0) gl.glColor3f(1.0, 1.0, 1.0) gl.glBegin(gl.GL_LINE_LOOP) gl.glVertex3f(x0, y0, 0.0) gl.glVertex3f(x1, y0, 0.0) gl.glVertex3f(x1, y1, 0.0) gl.glVertex3f(x0, y1, 0.0) gl.glEnd() gl.glDisable(gl.GL_LINE_STIPPLE) gl.glFlush() self.resize(*self.size) def on_paint(self, event): '''Renders the entire scene.''' import OpenGL.GL as gl import OpenGL.GLU as glu self.canvas.SetCurrent(self.canvas.context) if not self.gl_initialized: self.initgl() self.gl_initialized = True self.print_help() self.resize(*self.size) gl.glMatrixMode(gl.GL_MODELVIEW) gl.glLoadIdentity() gl.glClear(gl.GL_COLOR_BUFFER_BIT | gl.GL_DEPTH_BUFFER_BIT) while len(self._display_commands) > 0: self._display_commands.pop(0)() if self._refresh_display_lists: self.create_display_lists() gl.glPushMatrix() # camera_pos_rtp is relative to target position. To get the absolute # camera position, we need to add the target position. camera_pos_xyz = np.array(rtp_to_xyz(*self.camera_pos_rtp)) \ + self.target_pos glu.gluLookAt( *(list(camera_pos_xyz) + list(self.target_pos) + self.up)) if self.show_axes_tf: gl.glCallList(self.gllist_id) self.draw_data_set() gl.glPopMatrix() gl.glFlush() if self._selection_box is not None: self.draw_box(*self._selection_box) self.SwapBuffers() event.Skip() def post_reassign_selection(self, new_class): '''Reassigns pixels in selection box during the next rendering loop. ARGUMENT: `new_class` (int): The class to which the pixels in the box will be assigned. ''' if self._selection_box is None: msg = 'Bounding box is not selected. Hold SHIFT and click & ' + \ 'drag with the left\nmouse button to select a region.' print(msg) return 0 self.add_display_command(lambda: self.reassign_selection(new_class)) self.canvas.Refresh() return 0 def reassign_selection(self, new_class): '''Reassigns pixels in the selection box to the specified class. This method should only be called from the `display` method. Pixels are reassigned by identifying each pixel in the 3D display by their unique color, then reassigning them. Since pixels can block others in the z-buffer, this method iteratively reassigns pixels by removing any reassigned pixels from the display list, then reassigning again, repeating until there are no more pixels in the selction box. ''' nreassigned_tot = 0 i = 1 print('Reassigning points', end=' ') while True: indices = np.array(self._display_indices) classes = np.array(self.classes.ravel()[indices]) indices = indices[np.where(classes != new_class)] ids = self.get_points_in_selection_box(indices=indices) cr = self.classes.ravel() nreassigned = np.sum(cr[ids] != new_class) nreassigned_tot += nreassigned cr[ids] = new_class new_color = np.zeros(4, 'uint8') new_color[:3] = (np.array(self.palette[new_class]) * 255).astype('uint8') self.colors.reshape((-1, 4))[ids] = new_color self.create_display_lists() if len(ids) == 0: break # print 'Pass %d: %d points reassigned to class %d.' \ # % (i, nreassigned, new_class) print('.', end=' ') i += 1 print('\n%d points were reasssigned to class %d.' 
\ % (nreassigned_tot, new_class)) self._selection_box = None if nreassigned_tot > 0 and new_class == self.max_menu_class: self.max_menu_class += 1 if nreassigned_tot > 0: event = SpyMplEvent('spy_classes_modified') event.classes = self.classes event.nchanged = nreassigned_tot self.callbacks.process('spy_classes_modified', event) return nreassigned_tot def get_points_in_selection_box(self, **kwargs): '''Returns pixel IDs of all points in the current selection box. KEYWORD ARGS: `indices` (ndarray of ints): An alternate set of N-D image pixels to display. Pixels are identified by performing a background rendering loop wherein each pixel is rendered with a unique color. Then, glReadPixels is used to read colors of pixels in the current selection box. ''' import OpenGL.GL as gl indices = kwargs.get('indices', None) point_size_temp = self.point_size self.point_size = kwargs.get('point_size', 1) xsize = self._selection_box[2] - self._selection_box[0] + 1 ysize = self._selection_box[3] - self._selection_box[1] + 1 ids = np.zeros(xsize * ysize, int) self.create_display_lists(0, indices=indices) self.render_rgb_indexed_colors() gl.glPixelStorei(gl.GL_UNPACK_ALIGNMENT, 1) pixels = gl.glReadPixelsub(self._selection_box[0], self._selection_box[1], xsize, ysize, gl.GL_RGBA) pixels = np.frombuffer(pixels, dtype=np.uint8).reshape((ysize, xsize, 4)) for i in range(4): component = pixels[:, :, i].reshape((xsize * ysize,)) \ & self._rgba_masks[i] shift = (sum(self._rgba_bits[0:i]) - self._low_bits[i]) if shift > 0: ids += component.astype(int) << shift else: ids += component.astype(int) >> (-shift) points = ids[ids > 0] self.point_size = point_size_temp gl.glMatrixMode(gl.GL_MODELVIEW) gl.glLoadIdentity() gl.glClear(gl.GL_COLOR_BUFFER_BIT | gl.GL_DEPTH_BUFFER_BIT) self._refresh_display_lists = True return points def get_pixel_info(self, x, y, **kwargs): '''Prints row/col of the pixel at the given raster position. ARGUMENTS: `x`, `y`: (int): The pixel's coordinates relative to the lower left corner. ''' self._selection_box = (x, y, x, y) ids = self.get_points_in_selection_box(point_size=self.point_size) for id in ids: if id > 0: rc = self.index_to_image_row_col(id) print('Pixel %d %s has class %s.' % (id, rc, self.classes[rc])) return def render_rgb_indexed_colors(self, **kwargs): '''Draws scene in the background buffer to extract mouse click info''' import OpenGL.GL as gl import OpenGL.GLU as glu gl.glMatrixMode(gl.GL_MODELVIEW) gl.glLoadIdentity() gl.glClear(gl.GL_COLOR_BUFFER_BIT | gl.GL_DEPTH_BUFFER_BIT) # camera_pos_rtp is relative to the target position. To get the # absolute camera position, we need to add the target position. 
gl.glPushMatrix() camera_pos_xyz = np.array(rtp_to_xyz(*self.camera_pos_rtp)) \ + self.target_pos glu.gluLookAt( *(list(camera_pos_xyz) + list(self.target_pos) + self.up)) self.draw_data_set() gl.glPopMatrix() gl.glFlush() def index_to_image_row_col(self, index): '''Converts the unraveled pixel ID to row/col of the N-D image.''' rowcol = (index // self.data.shape[1], index % self.data.shape[1]) return rowcol def draw_data_set(self): '''Draws the N-D data set in the scene.''' import OpenGL.GL as gl for i in range(1, 9): gl.glCallList(self.gllist_id + i) def create_axes_list(self): '''Creates display lists to render unit length x,y,z axes.''' import OpenGL.GL as gl gl.glNewList(self.gllist_id, gl.GL_COMPILE) gl.glBegin(gl.GL_LINES) gl.glColor3f(1.0, 0.0, 0.0) gl.glVertex3f(0.0, 0.0, 0.0) gl.glVertex3f(1.0, 0.0, 0.0) gl.glColor3f(0.0, 1.0, 0.0) gl.glVertex3f(0.0, 0.0, 0.0) gl.glVertex3f(0.0, 1.0, 0.0) gl.glColor3f(-.0, 0.0, 1.0) gl.glVertex3f(0.0, 0.0, 0.0) gl.glVertex3f(0.0, 0.0, 1.0) gl.glColor3f(1.0, 1.0, 1.0) gl.glVertex3f(0.0, 0.0, 0.0) gl.glVertex3f(-1.0, 0.0, 0.0) gl.glVertex3f(0.0, 0.0, 0.0) gl.glVertex3f(0.0, -1.0, 0.0) gl.glVertex3f(0.0, 0.0, 0.0) gl.glVertex3f(0.0, 0.0, -1.0) gl.glEnd() def label_axis(x, y, z, label): gl.glRasterPos3f(x, y, z) glut.glutBitmapString(glut.GLUT_BITMAP_HELVETICA_18, str(label)) def label_axis_for_feature(x, y, z, feature_ind): feature = self.octant_features[feature_ind[0]][feature_ind[1]] label_axis(x, y, z, self.labels[feature]) if self._have_glut: try: import OpenGL.GLUT as glut if bool(glut.glutBitmapString): if self.quadrant_mode == 'independent': label_axis(1.05, 0.0, 0.0, 'x') label_axis(0.0, 1.05, 0.0, 'y') label_axis(0.0, 0.0, 1.05, 'z') elif self.quadrant_mode == 'mirrored': label_axis_for_feature(1.05, 0.0, 0.0, (0, 0)) label_axis_for_feature(0.0, 1.05, 0.0, (0, 1)) label_axis_for_feature(0.0, 0.0, 1.05, (0, 2)) label_axis_for_feature(-1.05, 0.0, 0.0, (6, 0)) label_axis_for_feature(0.0, -1.05, 0.0, (6, 1)) label_axis_for_feature(0.0, 0.0, -1.05, (6, 2)) else: label_axis_for_feature(1.05, 0.0, 0.0, (0, 0)) label_axis_for_feature(0.0, 1.05, 0.0, (0, 1)) label_axis_for_feature(0.0, 0.0, 1.05, (0, 2)) except: pass gl.glEndList() def GetGLExtents(self): """Get the extents of the OpenGL canvas.""" return def SwapBuffers(self): """Swap the OpenGL buffers.""" self.canvas.SwapBuffers() def on_erase_background(self, event): """Process the erase background event.""" pass # Do nothing, to avoid flashing on MSWin def initgl(self): '''App-specific initialization for after GLUT has been initialized.''' import OpenGL.GL as gl self.gllist_id = gl.glGenLists(9) gl.glEnableClientState(gl.GL_VERTEX_ARRAY) gl.glEnableClientState(gl.GL_COLOR_ARRAY) gl.glDisable(gl.GL_LIGHTING) gl.glDisable(gl.GL_TEXTURE_2D) gl.glDisable(gl.GL_FOG) gl.glDisable(gl.GL_COLOR_MATERIAL) gl.glEnable(gl.GL_DEPTH_TEST) gl.glShadeModel(gl.GL_FLAT) self.set_data(self.data, classes=self.classes, features=self.features) try: import OpenGL.GLUT as glut glut.glutInit() self._have_glut = True except: pass def on_resize(self, event): '''Process the resize event.''' # For wx versions 2.9.x, GLCanvas.GetContext() always returns None, # whereas 2.8.x will return the context so test for both versions. if wx.VERSION >= (2, 9) or self.canvas.GetContext(): self.canvas.SetCurrent(self.canvas.context) # Make sure the frame is shown before calling SetCurrent. 
self.Show() size = event.GetSize() self.resize(size.width, size.height) self.canvas.Refresh(False) event.Skip() def resize(self, width, height): """Reshape the OpenGL viewport based on dimensions of the window.""" import OpenGL.GL as gl import OpenGL.GLU as glu self.size = (width, height) gl.glViewport(0, 0, width, height) gl.glMatrixMode(gl.GL_PROJECTION) gl.glLoadIdentity() glu.gluPerspective(self.fovy, float(width) / height, self.znear, self.zfar) gl.glMatrixMode(gl.GL_MODELVIEW) gl.glLoadIdentity() def on_char(self, event): '''Callback function for when a keyboard button is pressed.''' key = chr(event.GetKeyCode()) # See `print_help` method for explanation of keybinds. if key == 'a': self.show_axes_tf = not self.show_axes_tf elif key == 'c': self.view_class_image() elif key == 'd': if self.data.shape[2] < 6: print('Only single-quadrant mode is supported for %d features.' % \ self.data.shape[2]) return if self.quadrant_mode == 'single': self.quadrant_mode = 'mirrored' elif self.quadrant_mode == 'mirrored': self.quadrant_mode = 'independent' else: self.quadrant_mode = 'single' print('Setting quadrant display mode to %s.' % self.quadrant_mode) self.randomize_features() elif key == 'f': self.randomize_features() elif key == 'h': self.print_help() elif key == 'm': self.mouse_panning = not self.mouse_panning elif key == 'p': self.point_size += 1 self._refresh_display_lists = True elif key == 'P': self.point_size = max(self.point_size - 1, 1.0) self._refresh_display_lists = True elif key == 'q': self.on_event_close() self.Close(True) elif key == 'r': self.reset_view_geometry() elif key == 'u': self._show_unassigned = not self._show_unassigned print('SHOW UNASSIGNED =', self._show_unassigned) self._refresh_display_lists = True self.canvas.Refresh() def update_window_title(self): '''Prints current file name and current point color to window title.''' from OpenGL.GLUT import glutSetWindowTitle s = 'SPy N-D Data Set' glutSetWindowTitle(s) def get_proxy(self): '''Returns a proxy object to access data from the window.''' return NDWindowProxy(self) def view_class_image(self, *args, **kwargs): '''Opens a dynamic raster image of class values. The class IDs displayed are those currently associated with the ND window. `args` and `kwargs` are additional arguments passed on to the `ImageView` constructor. Return value is the ImageView object. 
''' view = ImageView(classes=self.classes, *args, **kwargs) view.callbacks_common = self.callbacks view.show() return view def print_help(self): '''Prints a list of accepted keyboard/mouse inputs.''' print('''Mouse functions: --------------- Left-click & drag --> Rotate viewing geometry (or pan) CTRL+Left-click & drag --> Zoom viewing geometry CTRL+SHIFT+Left-click --> Print image row/col and class of selected pixel SHIFT+Left-click & drag --> Define selection box in the window Right-click --> Open GLUT menu for pixel reassignment Keyboard functions: ------------------- a --> Toggle axis display c --> View dynamic raster image of class values d --> Cycle display mode between single-quadrant, mirrored octants, and independent octants (display will not change until features are randomized again) f --> Randomize features displayed h --> Print this help message m --> Toggle mouse function between rotate/zoom and pan modes p/P --> Increase/Decrease the size of displayed points q --> Exit the application r --> Reset viewing geometry u --> Toggle display of unassigned points (points with class == 0) ''') def validate_args(data, *args, **kwargs): '''Validates arguments to the `ndwindow` function.''' if not isinstance(data, np.ndarray): raise TypeError('`data` argument must be a numpy ndarray.') if len(data.shape) != 3: raise ValueError('`data` argument must have 3 dimensions.') if data.shape[2] < 3: raise ValueError('`data` argument must have at least 3 values along' + ' third dimension.') if 'classes' in kwargs: classes = kwargs['classes'] if classes.shape != data.shape[:2]: raise ValueError('`classes` keyword argument shape does not match' ' `data` argument shape.') if 'features' in kwargs: features = kwargs['features'] if type(features) not in (list, tuple): raise TypeError('`features` keyword must be a list or tuple.') if len(features) in (3, 6): if max(features) >= data.shape[2]: raise ValueError('Feature index exceeds max for data array.') elif len(features) == 8: for octant in features: if type(octant) not in (list, tuple, type(None)): raise TypeError('Each octant in `features` keyword must ' + 'be a list/tuple of 3 ints or None.') if octant is None: continue if len(octant) != 3: raise TypeError('Each octant in the `features` keyword ' + 'must be a list/tuple of exactly 3 ints.') if max(octant) >= data.shape[2]: raise ValueError( 'Feature index exceeds max for data array.') else: raise ValueError( 'Invalid number of elements in `features` keyword.') if 'size' in kwargs: size = kwargs['size'] if type(size) not in (list, tuple) or len(size) != 2: raise ValueError( '`size` keyword must be a list/tuple of two ints.') for n in size: if type(n) != int: raise TypeError('`size` keyword must contain two ints.') if n < 1: raise ValueError('Invalid window size specification.') if 'title' in kwargs and type(kwargs['title']) != str: raise TypeError('Invalid window title specification.') spectral-0.22.4/spectral/graphics/rasterwindow.py000066400000000000000000000036241412674721200221460ustar00rootroot00000000000000''' Code for raster displays using wxPython. ''' from __future__ import absolute_import, division, print_function, unicode_literals import logging import wx from spectral.graphics.graphics import SpyWindow logger = logging.getLogger('spectral') class RasterWindow(wx.Frame, SpyWindow): ''' RasterWindow is the primary wxWindows object for displaying SPy images. The frames also handle left double-click events by displaying an x-y plot of the spectrum for the associated pixel.
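    A construction sketch (assumes a running wx App and that `rgb` is a
    uint8 array with shape (R, C, 3); in normal use these frames are
    created by the SPy viewing functions rather than instantiated
    directly):

        >>> frame = RasterWindow(None, -1, rgb, title='SPy Image')
        >>> frame.Show(True)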
''' def __init__(self, parent, index, rgb, **kwargs): if 'title' in kwargs: title = kwargs['title'] else: title = 'SPy Image' # wxFrame.__init__(self, parent, index, "SPy Frame") # wxScrolledWindow.__init__(self, parent, index, style = wxSUNKEN_BORDER) img = wx.EmptyImage(rgb.shape[0], rgb.shape[1]) img = wx.EmptyImage(rgb.shape[1], rgb.shape[0]) img.SetData(rgb.tostring()) self.bmp = img.ConvertToBitmap() self.kwargs = kwargs wx.Frame.__init__(self, parent, index, title, wx.DefaultPosition) self.SetClientSizeWH(self.bmp.GetWidth(), self.bmp.GetHeight()) wx.EVT_PAINT(self, self.on_paint) wx.EVT_LEFT_DCLICK(self, self.left_double_click) def on_paint(self, e): dc = wx.PaintDC(self) self.paint(dc) def paint(self, dc): dc.BeginDrawing() dc.DrawBitmap(self.bmp, 0, 0) # dc.Blit(0,0, bmp.GetWidth(), bmp.GetHeight(), mDC, 0, 0) dc.EndDrawing() def left_double_click(self, evt): from spectral import settings if "data source" in self.kwargs: logger.info('{}'.format((evt.GetY(), evt.GetX()))), settings.plotter.plot(self.kwargs["data source"], [evt.GetY(), evt.GetX()], source=self.kwargs["data source"]) spectral-0.22.4/spectral/graphics/spygnuplot.py000066400000000000000000000017541412674721200216440ustar00rootroot00000000000000''' A module to use Gnuplot for creating x-y plots of pixel spectra. ''' from __future__ import absolute_import, division, print_function, unicode_literals import Gnuplot xyplot = Gnuplot.Gnuplot() def plot(data, source=None): ''' Creates an x-y plot. USAGE: plot(data) If data is a vector, all the values in data will be drawn in a single series. If data is a 2D array, each column of data will be drawn as a separate series. ''' from numpy import shape global xyplot g = Gnuplot.Gnuplot() g('set style data lines') g('set grid') s = shape(data) if len(s) == 1: # plot a vector g('set xrange [0: %d]' % s[0]) g.plot(Gnuplot.Data(list(range(s[0])), data)) elif len(s) == 2: xvals = list(range(s[1])) g('set xrange [0: %d]' % s[1]) g.plot(Gnuplot.Data(xvals, data[0, :])) for i in range(1, s[0]): g.replot(Gnuplot.Data(xvals, data[i, :])) xyplot = g return g spectral-0.22.4/spectral/graphics/spypylab.py000066400000000000000000001356341412674721200212700ustar00rootroot00000000000000''' Code to use matplotlib for creating raster and spectral views. ''' from __future__ import absolute_import, division, print_function, unicode_literals __all__ = ['ImageView', 'imshow'] import numpy as np import warnings _mpl_callbacks_checked = False def check_disable_mpl_callbacks(): '''Disables matplotlib key event handlers, if appropriate.''' import matplotlib as mpl from spectral import settings global _mpl_callbacks_checked if _mpl_callbacks_checked is True or \ settings.imshow_disable_mpl_callbacks is False: return _mpl_callbacks_checked = True mpl.rcParams['keymap.back'] = '' mpl.rcParams['keymap.xscale'] = '' mpl.rcParams['keymap.yscale'] = '' mpl.rcParams['keymap.home'] = 'r' def xy_to_rowcol(x, y): '''Converts image (x, y) coordinate to pixel (row, col).''' return (int(y + 0.5), int(x + 0.5)) def rowcol_to_xy(r, c): '''Converts pixel (row, col) coordinate to (x, y) of pixel center.''' return (float(c), float(r)) class MplCallback(object): '''Base class for callbacks using matplotlib's CallbackRegistry. Behavior of MplCallback objects can be customized by providing a callable object to the constructor (or `connect` method) or by defining a `handle_event` method in a subclass. 
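    A minimal usage sketch (the handler function and the `view` object are
    hypothetical; `view` is assumed to be an ImageView that has already
    been shown):

        >>> def on_press(event):
        ...     print('button %s pressed' % event.button)
        >>> cb = MplCallback(registry=view, event='button_press_event',
        ...                  callback=on_press)
        >>> cb.connect()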
''' # If the following class attribute is False, callbacks will silently # disconnect when an exception is encountered during event processing # (e.g., if an associated window has been closed) . If it is True, the # associated exception will be rethrown. raise_event_exceptions = False show_events = False def __init__(self, registry=None, event=None, callback=None): ''' Arguments: registry (ImageView, CallbackRegistry, or FigureCanvas): The object that will generate the callback. If the argument is an ImageView, the callback will be bound to the associated FigureCanvas. event (str): The event type for which callbacks should be generated. callback (callable): An optional callable object to handle the event. If not provided, the `handle_event` method of the MplCallback will be called to handle the event (this method must be defined by a derived class if `callback` is not provided. Note that these arguments can be deferred until `MplCallback.connect` is called. ''' self.set_registry(registry) self.event = event self.callback = callback self.cid = None self.is_connected = False self.children = [] def set_registry(self, registry=None): ''' Arguments: registry (ImageView, CallbackRegistry, or FigureCanvas): The object that will generate the callback. If the argument is an ImageView, the callback will be bound to the associated FigureCanvas. ''' from matplotlib.cbook import CallbackRegistry if isinstance(registry, CallbackRegistry): self.registry = registry elif isinstance(registry, ImageView): self.registry = registry.axes.figure.canvas else: self.registry = registry def connect(self, registry=None, event=None, callback=None): '''Binds the callback to the registry and begins receiving event. Arguments: registry (ImageView, CallbackRegistry, or FigureCanvas): The object that will generate the callback. If the argument is an ImageView, the callback will be bound to the associated FigureCanvas. event (str): The event type for which callbacks should be generated. callback (callable): An optional callable object to handle the event. If not provided, the `handle_event` method of the MplCallback will be called to handle the event (this method must be defined by a derived class if `callback` is not provided. Note that these arguments can also be provided to the constructor. 
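        A sketch of deferred connection, reusing the hypothetical `on_press`
        handler and shown `view` from the class docstring example:

            >>> cb = MplCallback()
            >>> cb.connect(registry=view, event='button_press_event',
            ...            callback=on_press)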
''' from matplotlib.cbook import CallbackRegistry if self.is_connected: raise Exception('Callback is already connected.') if registry is not None: self.set_registry(registry) if event is not None: self.event = event if callback is not None: self.callback = callback if self.callback is None: cb = self else: cb = self.callback if isinstance(self.registry, CallbackRegistry): self.cid = self.registry.connect(self.event, self) elif isinstance(self.registry, ImageView): self.cid = self.registry.connect(self.event, self) else: # Assume registry is an MPL canvas self.cid = self.registry.mpl_connect(self.event, self) self.is_connected = True for c in self.children: c.connect() def disconnect(self): '''Stops the callback from receiving events.''' from matplotlib.cbook import CallbackRegistry if isinstance(self.registry, CallbackRegistry): self.registry.disconnect(self.cid) else: # Assume registry is an MPL canvas self.registry.mpl_disconnect(self.cid) self.is_connected = False self.cid = None for c in self.children: c.disconnect() def __call__(self, *args, **kwargs): if self.callback is not None: try: self.callback(*args, **kwargs) except Exception as e: self.disconnect() if self.raise_event_exceptions: raise e else: try: self.handle_event(*args, **kwargs) except Exception as e: self.disconnect() if self.raise_event_exceptions: raise e class ImageViewCallback(MplCallback): '''Base class for callbacks that operate on ImageView objects.''' def __init__(self, view, *args, **kwargs): super(ImageViewCallback, self).__init__(*args, **kwargs) self.view = view class ParentViewPanCallback(ImageViewCallback): '''A callback to pan an image based on a click in another image.''' def __init__(self, child, parent, *args, **kwargs): ''' Arguments: `child` (ImageView): The view that will be panned based on a parent click event. `parent` (ImageView): The view whose click location will cause the child to pan. See ImageViewCallback and MplCallback for additional arguments. ''' super(ParentViewPanCallback, self).__init__(parent, *args, **kwargs) self.child = child def handle_event(self, event): if self.show_events: print(event, 'key = %s' % event.key) if event.inaxes is not self.view.axes: return (r, c) = xy_to_rowcol(event.xdata, event.ydata) (nrows, ncols) = self.view._image_shape if r < 0 or r >= nrows or c < 0 or c >= ncols: return kp = KeyParser(event.key) if event.button == 1 and kp.mods_are('ctrl'): self.child.pan_to(event.ydata, event.xdata) def connect(self): super(ParentViewPanCallback, self).connect(registry=self.view, event='button_press_event') class ImageViewKeyboardHandler(ImageViewCallback): '''Default handler for keyboard events in an ImageView.''' def __init__(self, view, *args, **kwargs): super(ImageViewKeyboardHandler, self).__init__(view, registry=view, event='key_press_event', *args, **kwargs) self.cb_key_release = ImageViewCallback(view, registry=view, event='key_release_event', callback=self.on_key_release, *args, **kwargs) # Must add to children member to automatically connect/disconnect. 
self.children.append(self.cb_key_release) self.idstr = '' def on_key_release(self, event): if self.show_events: print('key = %s' % event.key) kp = KeyParser(event.key) key = kp.key if key is None and self.view.selector is not None and \ self.view.selector.get_active() and kp.mods_are('shift') \ and self.view.selector.eventpress is not None: print('Resetting selection.') self.view.selector.eventpress = None self.view.selector.set_active(False) self.view.selection = None self.view.selector.to_draw.set_visible(False) self.view.refresh() def handle_event(self, event): from spectral import settings if self.show_events: print('key = %s' % event.key) kp = KeyParser(event.key) key = kp.key #----------------------------------------------------------- # Handling for keyboard input related to class ID assignment #----------------------------------------------------------- if key is None and kp.mods_are('shift') and \ self.view.selector is not None: # Rectangle selector is active while shift key is pressed self.view.selector.set_active(True) return if key in [str(i) for i in range(10)] and self.view.selector is not None: if self.view.selection is None: print('Select an image region before assigning a class ID.') return if len(self.idstr) > 0 and self.idstr[-1] == '!': print('Cancelled class ID assignment.') self.idstr = '' return else: self.idstr += key return if key == 'enter' and self.view.selector is not None: if self.view.selection is None: print('Select an image region before assigning a class ID.') return if len(self.idstr) == 0: print('Enter a numeric class ID before assigning a class ID.') return if self.idstr[-1] != '!': print('Press ENTER again to assign class %s to pixel ' \ 'region [%d:%d, %d:%d]:' \ % ((self.idstr,) + tuple(self.view.selection))) self.idstr += '!' return else: i = int(self.idstr[:-1]) n = self.view.label_region(self.view.selection, i) if n == 0: print('No pixels reassigned.') else: print('%d pixels reassigned to class %d.' 
% (n, i)) self.idstr = '' return if len(self.idstr) > 0: self.idstr = '' print('Cancelled class ID assignment.') #----------------------------------------------------------- # General keybinds #----------------------------------------------------------- if key == 'a' and self.view.display_mode == 'overlay': self.view.class_alpha = max(self.view.class_alpha - 0.05, 0) elif key == 'A' and self.view.display_mode == 'overlay': self.view.class_alpha = min(self.view.class_alpha + 0.05, 1) elif key == 'c': if self.view.classes is not None: self.view.set_display_mode('classes') elif key == 'C': if self.view.classes is not None \ and self.view.data_axes is not None: self.view.set_display_mode('overlay') elif key == 'd': if self.view.data_axes is not None: self.view.set_display_mode('data') elif key == 'h': self.print_help() elif key == 'i': if self.view.interpolation == 'nearest': self.view.interpolation = settings.imshow_interpolation else: self.view.interpolation = 'nearest' elif key == 'z': self.view.open_zoom() def print_help(self): print() print('Mouse Functions:') print('----------------') print('ctrl+left-click -> pan zoom window to pixel') print('shift+left-click&drag -> select rectangular image region') print('left-dblclick -> plot pixel spectrum') print() print('Keybinds:') print('---------') print('0-9 -> enter class ID for image pixel labeling') print('ENTER -> apply specified class ID to selected rectangular region') print('a/A -> decrease/increase class overlay alpha value') print('c -> set display mode to "classes" (if classes set)') print('C -> set display mode to "overlay" (if data and ' \ 'classes set)') print('d -> set display mode to "data" (if data set)') print('h -> print help message') print('i -> toggle pixel interpolation between "nearest" and ' \ 'SPy default.') print('z -> open zoom window') print() print('See matplotlib imshow documentation for addition key binds.') print() class KeyParser(object): '''Class to handle ambiguities in matplotlib event key values.''' aliases = {'ctrl': ['ctrl', 'control'], 'alt': ['alt'], 'shift': ['shift'], 'super': ['super']} def __init__(self, key_str=None): self.reset() if key_str is not None: self.parse(key_str) def reset(self): self.key = None self.modifiers = set() def parse(self, key_str): '''Extracts the key value and modifiers from a string.''' self.reset() if key_str is None: return tokens = key_str.split('+') for token in tokens[:-1]: mods = self.get_token_modifiers(token) if len(mods) == 0: raise ValueError('Unrecognized modifier: %s' % repr(token)) self.modifiers.update(mods) # For the final token, need to determine if it is a key or modifier mods = self.get_token_modifiers(tokens[-1]) if len(mods) > 0: self.modifiers.update(mods) else: self.key = tokens[-1] def has_mod(self, m): '''Returns True if `m` is one of the modifiers.''' return m in self.modifiers def mods_are(self, *args): '''Return True if modifiers are exactly the ones specified.''' for a in args: if a not in self.modifiers: return False return True def get_token_modifiers(self, token): mods = set() for (modifier, aliases) in list(self.aliases.items()): if token in aliases: mods.add(modifier) return mods class ImageViewMouseHandler(ImageViewCallback): def __init__(self, view, *args, **kwargs): super(ImageViewMouseHandler, self).__init__(view, registry=view, event='button_press_event', *args, **kwargs) def handle_event(self, event): '''Callback for click event in the image display.''' if self.show_events: print(event, ', key = %s' % event.key) if event.inaxes is not 
self.view.axes: return (r, c) = (int(event.ydata + 0.5), int(event.xdata + 0.5)) (nrows, ncols) = self.view._image_shape if r < 0 or r >= nrows or c < 0 or c >= ncols: return kp = KeyParser(event.key) if event.button == 1: if event.dblclick and kp.key is None: if self.view.source is not None: from spectral import settings import matplotlib.pyplot as plt if self.view.spectrum_plot_fig_id is None: f = plt.figure() self.view.spectrum_plot_fig_id = f.number try: f = plt.figure(self.view.spectrum_plot_fig_id) except: f = plt.figure() self.view.spectrum_plot_fig_id = f.number s = f.gca() settings.plotter.plot(self.view.source[r, c], self.view.source) s.xaxis.axes.relim() s.xaxis.axes.autoscale(True) f.canvas.draw() class SpyMplEvent(object): def __init__(self, name): self.name = name class ImageView(object): '''Class to manage events and data associated with image raster views. In most cases, it is more convenient to simply call :func:`~spectral.graphics.spypylab.imshow`, which creates, displays, and returns an :class:`ImageView` object. Creating an :class:`ImageView` object directly (or creating an instance of a subclass) enables additional customization of the image display (e.g., overriding default event handlers). If the object is created directly, call the :meth:`show` method to display the image. The underlying image display functionality is implemented via :func:`matplotlib.pyplot.imshow`. ''' selector_rectprops = dict(facecolor='red', edgecolor = 'black', alpha=0.5, fill=True) selector_lineprops = dict(color='black', linestyle='-', linewidth = 2, alpha=0.5) def __init__(self, data=None, bands=None, classes=None, source=None, **kwargs): ''' Arguments: `data` (ndarray or :class:`SpyFile`): The source of RGB bands to be displayed. with shape (R, C, B). If the shape is (R, C, 3), the last dimension is assumed to provide the red, green, and blue bands (unless the `bands` argument is provided). If :math:`B > 3` and `bands` is not provided, the first, middle, and last band will be used. `bands` (triplet of integers): Specifies which bands in `data` should be displayed as red, green, and blue, respectively. `classes` (ndarray of integers): An array of integer-valued class labels with shape (R, C). If the `data` argument is provided, the shape must match the first two dimensions of `data`. `source` (ndarray or :class:`SpyFile`): The source of spectral data associated with the image display. This optional argument is used to access spectral data (e.g., to generate a spectrum plot when a user double-clicks on the image display. Keyword arguments: Any keyword that can be provided to :func:`~spectral.graphics.graphics.get_rgb` or :func:`matplotlib.imshow`. 
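        A usage sketch (assumes `data` is a SpyFile or (R, C, B) ndarray and
        `gt` is an (R, C) integer array of class labels; both names are
        hypothetical):

            >>> view = ImageView(data, bands=(30, 20, 10), classes=gt)
            >>> view.show(mode='overlay')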
''' import spectral from spectral import settings self.is_shown = False self.imshow_data_kwargs = {'cmap': settings.imshow_float_cmap} self.rgb_kwargs = {} self.imshow_class_kwargs = {'zorder': 1} self.data = data self.data_rgb = None self.data_rgb_meta = {} self.classes = None self.class_rgb = None self.source = None self.bands = bands self.data_axes = None self.class_axes = None self.axes = None self._image_shape = None self.display_mode = None self._interpolation = None self.selection = None self.interpolation = kwargs.get('interpolation', settings.imshow_interpolation) if data is not None: self.set_data(data, bands, **kwargs) if classes is not None: self.set_classes(classes, **kwargs) if source is not None: self.set_source(source) self.class_colors = spectral.spy_colors self.spectrum_plot_fig_id = None self.parent = None self.selector = None self._on_parent_click_cid = None self._class_alpha = settings.imshow_class_alpha # Callbacks for events associated specifically with this window. self.callbacks = None # A sharable callback registry for related windows. If this # CallbackRegistry is set prior to calling ImageView.show (e.g., by # setting it equal to the `callbacks_common` member of another # ImageView object), then the registry will be shared. Otherwise, a new # callback registry will be created for this ImageView. self.callbacks_common = None check_disable_mpl_callbacks() def set_data(self, data, bands=None, **kwargs): '''Sets the data to be shown in the RGB channels. Arguments: `data` (ndarray or SpyImage): If `data` has more than 3 bands, the `bands` argument can be used to specify which 3 bands to display. `data` will be passed to `get_rgb` prior to display. `bands` (3-tuple of int): Indices of the 3 bands to display from `data`. Keyword Arguments: Any valid keyword for `get_rgb` or `matplotlib.imshow` can be given. ''' from .graphics import _get_rgb_kwargs self.data = data self.bands = bands rgb_kwargs = {} for k in _get_rgb_kwargs: if k in kwargs: rgb_kwargs[k] = kwargs.pop(k) self.set_rgb_options(**rgb_kwargs) self._update_data_rgb() if self._image_shape is None: self._image_shape = data.shape[:2] elif data.shape[:2] != self._image_shape: raise ValueError('Image shape is inconsistent with previously ' \ 'set data.') self.imshow_data_kwargs.update(kwargs) if 'interpolation' in self.imshow_data_kwargs: self.interpolation = self.imshow_data_kwargs['interpolation'] self.imshow_data_kwargs.pop('interpolation') if len(kwargs) > 0 and self.is_shown: msg = 'Keyword args to set_data only have an effect if ' \ 'given before the image is shown.' warnings.warn(UserWarning(msg)) if self.is_shown: self.refresh() def set_rgb_options(self, **kwargs): '''Sets parameters affecting RGB display of data. Accepts any keyword supported by :func:`~spectral.graphics.graphics.get_rgb`. ''' from .graphics import _get_rgb_kwargs for k in kwargs: if k not in _get_rgb_kwargs: raise ValueError('Unexpected keyword: {0}'.format(k)) self.rgb_kwargs = kwargs.copy() if self.is_shown: self._update_data_rgb() self.refresh() def _update_data_rgb(self): '''Regenerates the RGB values for display.''' from .graphics import get_rgb_meta (self.data_rgb, self.data_rgb_meta) = \ get_rgb_meta(self.data, self.bands, **self.rgb_kwargs) # If it is a gray-scale image, only keep the first RGB component so # matplotlib imshow's cmap can still be used. 
if self.data_rgb_meta['mode'] == 'monochrome' and \ self.data_rgb.ndim == 3: self.data_rgb = self.data_rgb[:, :, 0] def set_classes(self, classes, colors=None, **kwargs): '''Sets the array of class values associated with the image data. Arguments: `classes` (ndarray of int): `classes` must be an integer-valued array with the same number rows and columns as the display data (if set). `colors`: (array or 3-tuples): Color triplets (with values in the range [0, 255]) that define the colors to be associatd with the integer indices in `classes`. Keyword Arguments: Any valid keyword for `matplotlib.imshow` can be provided. ''' from .graphics import _get_rgb_kwargs self.classes = classes if classes is None: return if self._image_shape is None: self._image_shape = classes.shape[:2] elif classes.shape[:2] != self._image_shape: raise ValueError('Class data shape is inconsistent with ' \ 'previously set data.') if colors is not None: self.class_colors = colors kwargs = dict([item for item in list(kwargs.items()) if item[0] not in \ _get_rgb_kwargs]) self.imshow_class_kwargs.update(kwargs) if 'interpolation' in self.imshow_class_kwargs: self.interpolation = self.imshow_class_kwargs['interpolation'] self.imshow_class_kwargs.pop('interpolation') if len(kwargs) > 0 and self.is_shown: msg = 'Keyword args to set_classes only have an effect if ' \ 'given before the image is shown.' warnings.warn(UserWarning(msg)) if self.is_shown: self.refresh() def set_source(self, source): '''Sets the image data source (used for accessing spectral data). Arguments: `source` (ndarray or :class:`SpyFile`): The source for spectral data associated with the view. ''' self.source = source def show(self, mode=None, fignum=None): '''Renders the image data. Arguments: `mode` (str): Must be one of: "data": Show the data RGB "classes": Shows indexed color for `classes` "overlay": Shows class colors overlaid on data RGB. If `mode` is not provided, a mode will be automatically selected, based on the data set in the ImageView. `fignum` (int): Figure number of the matplotlib figure in which to display the ImageView. If not provided, a new figure will be created. ''' import matplotlib.pyplot as plt from spectral import settings if self.is_shown: msg = 'ImageView.show should only be called once.' warnings.warn(UserWarning(msg)) return set_mpl_interactive() kwargs = {} if fignum is not None: kwargs['num'] = fignum if settings.imshow_figure_size is not None: kwargs['figsize'] = settings.imshow_figure_size plt.figure(**kwargs) if self.data_rgb is not None: self.show_data() if self.classes is not None: self.show_classes() if mode is None: self._guess_mode() else: self.set_display_mode(mode) self.axes.format_coord = self.format_coord self.init_callbacks() self.is_shown = True def init_callbacks(self): '''Creates the object's callback registry and default callbacks.''' from spectral import settings from matplotlib.cbook import CallbackRegistry self.callbacks = CallbackRegistry() # callbacks_common may have been set to a shared external registry # (e.g., to the callbacks_common member of another ImageView object). So # don't create it if it has already been set. 
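        # For example (sketch): assigning a shared registry before showing a
        # second view, e.g. `view2.callbacks_common = view1.callbacks_common`,
        # lets class-modification events from one view update the other.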
if self.callbacks_common is None: self.callbacks_common = CallbackRegistry() # Keyboard callback self.cb_mouse = ImageViewMouseHandler(self) self.cb_mouse.connect() # Mouse callback self.cb_keyboard = ImageViewKeyboardHandler(self) self.cb_keyboard.connect() # Class update event callback def updater(*args, **kwargs): if self.classes is None: self.set_classes(args[0].classes) self.refresh() callback = MplCallback(registry=self.callbacks_common, event='spy_classes_modified', callback=updater) callback.connect() self.cb_classes_modified = callback if settings.imshow_enable_rectangle_selector is False: return try: from matplotlib.widgets import RectangleSelector self.selector = RectangleSelector(self.axes, self._select_rectangle, button=1, useblit=True, spancoords='data', drawtype='box', rectprops = \ self.selector_rectprops) self.selector.set_active(False) except: self.selector = None msg = 'Failed to create RectangleSelector object. Interactive ' \ 'pixel class labeling will be unavailable.' warnings.warn(msg) def label_region(self, rectangle, class_id): '''Assigns all pixels in the rectangle to the specified class. Arguments: `rectangle` (4-tuple of integers): Tuple or list defining the rectangle bounds. Should have the form (row_start, row_stop, col_start, col_stop), where the stop indices are not included (i.e., the effect is `classes[row_start:row_stop, col_start:col_stop] = id`. class_id (integer >= 0): The class to which pixels will be assigned. Returns the number of pixels reassigned (the number of pixels in the rectangle whose class has *changed* to `class_id`. ''' if self.classes is None: self.classes = np.zeros(self.data_rgb.shape[:2], dtype=np.int16) r = rectangle n = np.sum(self.classes[r[0]:r[1], r[2]:r[3]] != class_id) if n > 0: self.classes[r[0]:r[1], r[2]:r[3]] = class_id event = SpyMplEvent('spy_classes_modified') event.classes = self.classes event.nchanged = n self.callbacks_common.process('spy_classes_modified', event) # Make selection rectangle go away. self.selector.to_draw.set_visible(False) self.refresh() return n return 0 def _select_rectangle(self, event1, event2): if event1.inaxes is not self.axes or event2.inaxes is not self.axes: self.selection = None return (r1, c1) = xy_to_rowcol(event1.xdata, event1.ydata) (r2, c2) = xy_to_rowcol(event2.xdata, event2.ydata) (r1, r2) = sorted([r1, r2]) (c1, c2) = sorted([c1, c2]) if (r2 < 0) or (r1 >= self._image_shape[0]) or \ (c2 < 0) or (c1 >= self._image_shape[1]): self.selection = None return r1 = max(r1, 0) r2 = min(r2, self._image_shape[0] - 1) c1 = max(c1, 0) c2 = min(c2, self._image_shape[1] - 1) print('Selected region: [%d: %d, %d: %d]' % (r1, r2 + 1, c1, c2 + 1)) self.selection = [r1, r2 + 1, c1, c2 + 1] self.selector.set_active(False) # Make the rectangle display until at least the next event self.selector.to_draw.set_visible(True) self.selector.update() def _guess_mode(self): '''Select an appropriate display mode, based on current data.''' if self.data_rgb is not None: self.set_display_mode('data') elif self.classes is not None: self.set_display_mode('classes') else: raise Exception('Unable to display image: no data set.') def show_data(self): '''Show the image data.''' import matplotlib.pyplot as plt if self.data_axes is not None: msg = 'ImageView.show_data should only be called once.' warnings.warn(UserWarning(msg)) return elif self.data_rgb is None: raise Exception('Unable to display data: data array not set.') if self.axes is not None: # A figure has already been created for the view. Make it current. 
plt.figure(self.axes.figure.number) self.imshow_data_kwargs['interpolation'] = self._interpolation self.data_axes = plt.imshow(self.data_rgb, **self.imshow_data_kwargs) if self.axes is None: self.axes = self.data_axes.axes def show_classes(self): '''Show the class values.''' import matplotlib.pyplot as plt from matplotlib.colors import ListedColormap, NoNorm from spectral import get_rgb if self.class_axes is not None: msg = 'ImageView.show_classes should only be called once.' warnings.warn(UserWarning(msg)) return elif self.classes is None: raise Exception('Unable to display classes: class array not set.') cm = ListedColormap(np.array(self.class_colors) / 255.) self._update_class_rgb() kwargs = self.imshow_class_kwargs.copy() kwargs.update({'cmap': cm, 'norm':NoNorm(), 'interpolation': self._interpolation}) if self.axes is not None: # A figure has already been created for the view. Make it current. plt.figure(self.axes.figure.number) self.class_axes = plt.imshow(self.class_rgb, **kwargs) if self.axes is None: self.axes = self.class_axes.axes self.class_axes.set_zorder(1) if self.display_mode == 'overlay': self.class_axes.set_alpha(self._class_alpha) else: self.class_axes.set_alpha(1) #self.class_axes.axes.set_axis_bgcolor('black') def refresh(self): '''Updates the displayed data (if it has been shown).''' if self.is_shown: self._update_class_rgb() if self.class_axes is not None: self.class_axes.set_data(self.class_rgb) self.class_axes.set_interpolation(self._interpolation) elif self.display_mode in ('classes', 'overlay'): self.show_classes() if self.data_axes is not None: self.data_axes.set_data(self.data_rgb) self.data_axes.set_interpolation(self._interpolation) elif self.display_mode in ('data', 'overlay'): self.show_data() self.axes.figure.canvas.draw() def _update_class_rgb(self): if self.display_mode == 'overlay': self.class_rgb = np.ma.array(self.classes, mask=(self.classes==0)) else: self.class_rgb = np.array(self.classes) def set_display_mode(self, mode): '''`mode` must be one of ("data", "classes", "overlay").''' if mode not in ('data', 'classes', 'overlay'): raise ValueError('Invalid display mode: ' + repr(mode)) self.display_mode = mode show_data = mode in ('data', 'overlay') if self.data_axes is not None: self.data_axes.set_visible(show_data) show_classes = mode in ('classes', 'overlay') if self.classes is not None and self.class_axes is None: # Class data values were just set self.show_classes() if self.class_axes is not None: self.class_axes.set_visible(show_classes) if mode == 'classes': self.class_axes.set_alpha(1) else: self.class_axes.set_alpha(self._class_alpha) self.refresh() @property def class_alpha(self): '''alpha transparency for the class overlay.''' return self._class_alpha @class_alpha.setter def class_alpha(self, alpha): if alpha < 0 or alpha > 1: raise ValueError('Alpha value must be in range [0, 1].') self._class_alpha = alpha if self.class_axes is not None: self.class_axes.set_alpha(alpha) if self.is_shown: self.refresh() @property def interpolation(self): '''matplotlib pixel interpolation to use in the image display.''' return self._interpolation @interpolation.setter def interpolation(self, interpolation): if interpolation == self._interpolation: return self._interpolation = interpolation if not self.is_shown: return if self.data_axes is not None: self.data_axes.set_interpolation(interpolation) if self.class_axes is not None: self.class_axes.set_interpolation(interpolation) self.refresh() def set_title(self, s): if self.is_shown: self.axes.set_title(s) 
self.refresh() def open_zoom(self, center=None, size=None): '''Opens a separate window with a zoomed view. If a ctrl-lclick event occurs in the original view, the zoomed window will pan to the location of the click event. Arguments: `center` (two-tuple of int): Initial (row, col) of the zoomed view. `size` (int): Width and height (in source image pixels) of the initial zoomed view. Returns: A new ImageView object for the zoomed view. ''' from spectral import settings import matplotlib.pyplot as plt if size is None: size = settings.imshow_zoom_pixel_width (nrows, ncols) = self._image_shape fig_kwargs = {} if settings.imshow_zoom_figure_width is not None: width = settings.imshow_zoom_figure_width fig_kwargs['figsize'] = (width, width) fig = plt.figure(**fig_kwargs) view = ImageView(source=self.source) view.set_data(self.data, self.bands, **self.rgb_kwargs) view.set_classes(self.classes, self.class_colors) view.imshow_data_kwargs = self.imshow_data_kwargs.copy() kwargs = {'extent': (-0.5, ncols - 0.5, nrows - 0.5, -0.5)} view.imshow_data_kwargs.update(kwargs) view.imshow_class_kwargs = self.imshow_class_kwargs.copy() view.imshow_class_kwargs.update(kwargs) view.callbacks_common = self.callbacks_common view.spectrum_plot_fig_id = self.spectrum_plot_fig_id view.show(fignum=fig.number, mode=self.display_mode) view.axes.set_xlim(0, size) view.axes.set_ylim(size, 0) view.interpolation = 'nearest' if center is not None: view.pan_to(*center) view.cb_parent_pan = ParentViewPanCallback(view, self) view.cb_parent_pan.connect() return view def pan_to(self, row, col): '''Centers view on pixel coordinate (row, col).''' if self.axes is None: raise Exception('Cannot pan image until it is shown.') (xmin, xmax) = self.axes.get_xlim() (ymin, ymax) = self.axes.get_ylim() xrange_2 = (xmax - xmin) / 2.0 yrange_2 = (ymax - ymin) / 2.0 self.axes.set_xlim(col - xrange_2, col + xrange_2) self.axes.set_ylim(row - yrange_2, row + yrange_2) self.axes.figure.canvas.draw() def zoom(self, scale): '''Zooms view in/out (`scale` > 1 zooms in).''' (xmin, xmax) = self.axes.get_xlim() (ymin, ymax) = self.axes.get_ylim() x = (xmin + xmax) / 2.0 y = (ymin + ymax) / 2.0 dx = (xmax - xmin) / 2.0 / scale dy = (ymax - ymin) / 2.0 / scale self.axes.set_xlim(x - dx, x + dx) self.axes.set_ylim(y - dy, y + dy) self.refresh() def format_coord(self, x, y): '''Formats pixel coordinate string displayed in the window.''' (nrows, ncols) = self._image_shape if x < -0.5 or x > ncols - 0.5 or y < -0.5 or y > nrows - 0.5: return "" (r, c) = xy_to_rowcol(x, y) s = 'pixel=[%d,%d]' % (r, c) if self.classes is not None: try: s += ' class=%d' % self.classes[r, c] except: pass return s def __str__(self): meta = self.data_rgb_meta s = 'ImageView object:\n' if 'bands' in meta: s += ' {0:<20}: {1}\n'.format("Display bands", meta['bands']) if self.interpolation == None: interp = "" else: interp = self.interpolation s += ' {0:<20}: {1}\n'.format("Interpolation", interp) if 'rgb range' in meta: s += ' {0:<20}:\n'.format("RGB data limits") for (c, r) in zip('RGB', meta['rgb range']): s += ' {0}: {1}\n'.format(c, str(r)) return s def __repr__(self): return str(self) def imshow(data=None, bands=None, classes=None, source=None, colors=None, figsize=None, fignum=None, title=None, **kwargs): '''A wrapper around matplotlib's imshow for multi-band images. Arguments: `data` (SpyFile or ndarray): Can have shape (R, C) or (R, C, B). 
`bands` (tuple of integers, optional) If `bands` has 3 values, the bands specified are extracted from `data` to be plotted as the red, green, and blue colors, respectively. If it contains a single value, then a single band will be extracted from the image. `classes` (ndarray of integers): An array of integer-valued class labels with shape (R, C). If the `data` argument is provided, the shape must match the first two dimensions of `data`. The returned `ImageView` object will use a copy of this array. To access class values that were altered after calling `imshow`, access the `classes` attribute of the returned `ImageView` object. `source` (optional, SpyImage or ndarray): Object used for accessing image source data. If this argument is not provided, events such as double-clicking will have no effect (i.e., a spectral plot will not be created). `colors` (optional, array of ints): Custom colors to be used for class image view. If provided, this argument should be an array of 3-element arrays, each of which specifies an RGB triplet with integer color components in the range [0, 256). `figsize` (optional, 2-tuple of scalar): Specifies the width and height (in inches) of the figure window to be created. If this value is not provided, the value specified in `spectral.settings.imshow_figure_size` will be used. `fignum` (optional, integer): Specifies the figure number of an existing matplotlib figure. If this argument is None, a new figure will be created. `title` (str): The title to be displayed above the image. Keywords: Keywords accepted by :func:`~spectral.graphics.graphics.get_rgb` or :func:`matplotlib.imshow` will be passed on to the appropriate function. This function defaults the color scale (imshow's "cmap" keyword) to "gray". To use imshow's default color scale, call this function with keyword `cmap=None`. Returns: An `ImageView` object, which can be subsequently used to refine the image display. See :class:`~spectral.graphics.spypylab.ImageView` for additional details. Examples: Show a true color image of a hyperspectral image: >>> data = open_image('92AV3C.lan').load() >>> view = imshow(data, bands=(30, 20, 10)) Show ground truth in a separate window: >>> classes = open_image('92AV3GT.GIS').read_band(0) >>> cview = imshow(classes=classes) Overlay ground truth data on the data display: >>> view.set_classes(classes) >>> view.set_display_mode('overlay') Show RX anomaly detector results in the view and a zoom window showing true color data: >>> x = rx(data) >>> zoom = view.open_zoom() >>> view.set_data(x) Note that pressing ctrl-lclick with the mouse in the main window will cause the zoom window to pan to the clicked location. Opening zoom windows, changing display modes, and other functions can also be achieved via keys mapped directly to the displayed image. Press "h" with focus on the displayed image to print a summary of mouse/ keyboard commands accepted by the display. 
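    A window title and figure size can also be given directly (the values
    here are arbitrary):

    >>> view = imshow(data, bands=(30, 20, 10), title='False color',
    ...               figsize=(8, 8))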
''' import matplotlib.pyplot as plt from spectral import settings from .graphics import get_rgb set_mpl_interactive() view = ImageView() if data is not None: view.set_data(data, bands, **kwargs) if classes is not None: view.set_classes(classes, colors, **kwargs) if source is not None: view.set_source(source) elif data is not None and len(data.shape) == 3 and data.shape[2] > 3: view.set_source(data) if fignum is not None or figsize is not None: fig = plt.figure(num=fignum, figsize=figsize) view.show(fignum=fig.number) else: view.show() if title is not None: view.set_title(title) return view def plot(data, source=None): ''' Creates an x-y plot. USAGE: plot(data) If data is a vector, all the values in data will be drawn in a single series. If data is a 2D array, each column of data will be drawn as a separate series. ''' import matplotlib.pyplot as plt import spectral set_mpl_interactive() if source is not None and hasattr(source, 'bands') and \ source.bands.centers is not None: xvals = source.bands.centers else: xvals = list(range(data.shape[-1])) if data.ndim == 1: data = data[np.newaxis, :] data = data.reshape(-1, data.shape[-1]) if source is not None and hasattr(source, 'metadata') and \ 'bbl' in source.metadata: # Do not plot bad bands data = np.array(data) data[:, np.array(source.metadata['bbl']) == 0] = None for x in data: p = plt.plot(xvals, x) spectral._xyplot = p plt.grid(1) if source is not None and hasattr(source, 'bands'): if source.bands.band_quantity is not None: xlabel = source.bands.band_quantity else: xlabel = '' if source.bands.band_unit is not None: if len(xlabel) > 0: xlabel += ' (%s)' % source.bands.band_unit else: xlabel = str(source.bands.band_unit) plt.xlabel(xlabel) return p def set_mpl_interactive(): '''Ensure matplotlib is in interactive mode.''' import matplotlib.pyplot as plt if not plt.isinteractive(): plt.interactive(True) spectral-0.22.4/spectral/graphics/spywxpython.py000066400000000000000000000033761412674721200220560ustar00rootroot00000000000000''' Classes and functions for viewing/manipulating images using wxWindows. In order to use wxWindows and still have a command line interface, wxWindows must be imported in a separate thread and all GUI objects must be referenced in that thread. Thus, much of the actual GUI code is in SpyWxPythonThread.py. ''' from __future__ import absolute_import, division, print_function, unicode_literals viewer = None class SpyWxPythonThreadStarter: def start(self): '''Starts the GUI thread.''' import _thread import time _thread.start_new_thread(self.run, ()) def run(self): ''' This is the first function executed in the wxWindows thread. It creates the wxApp and starts the main event loop. ''' from .spywxpythonthread import WxImageServer self.app = WxImageServer(0) self.app.MainLoop() def view(self, rgb, **kwargs): '''Sends a view request to the wxWindows thread.''' from . import spywxpythonthread evt = spywxpythonthread.view_imageRequest(rgb, **kwargs) spywxpythonthread.wx.PostEvent(self.app.catcher, evt) def init(): global viewer viewer = SpyWxPythonThreadStarter() viewer.start() def view(*args, **kwargs): '''Displays an image in a wxWindows frame.''' from . import graphics from spectral.spectral import Image import numpy as np rgb = graphics.get_rgb(*args, **kwargs) # To plot pixel spectrum on double-click, create a reference # back to the original SpyFile object. 
if isinstance(args[0], Image): kwargs["data source"] = args[0] if "colors" not in kwargs: rgb = (rgb * 255).astype(np.uint8) else: rgb = rgb.astype(np.uint8) viewer.view(rgb, **kwargs) spectral-0.22.4/spectral/graphics/spywxpythonthread.py000066400000000000000000000074601412674721200232440ustar00rootroot00000000000000''' wxWindows code which executes in a separate thread from the main thread This module handles disply of images and related events. ''' from __future__ import absolute_import, division, print_function, unicode_literals DEFAULT_X_SIZE = 600 DEFAULT_Y_SIZE = 600 import logging from wx import * #from Numeric import * from spectral.graphics import * logger = logging.getLogger('spectral') #--------------------------------------------------------------------------- #wxEVT_VIEW_IMAGE = wxID_HIGHEST + 1 wxEVT_VIEW_IMAGE = 50002 def EVT_VIEW_IMAGE(win, func): win.Connect(-1, -1, wxEVT_VIEW_IMAGE, func) class view_imageRequest(wx.PyEvent): '''A request for a new image.''' def __init__(self, rgb, **kwargs): wx.PyEvent.__init__(self) self.SetEventType(wxEVT_VIEW_IMAGE) self.rgb = rgb self.kwargs = kwargs class HiddenCatcher(wx.Frame): ''' The "catcher" frame in the second thread. It is invisible. It's only job is to receive events from the main thread, and create the appropriate windows. ''' def __init__(self): wx.Frame.__init__(self, None, -1, '') EVT_VIEW_IMAGE(self, self.view_image) # self.bmp = wxBitmap("/dos/myphotos/roll2/can.bmp", # wxBITMAP_TYPE_BMP) def view_image(self, evt): if 'function' in evt.kwargs: frame = evt.kwargs['function']() frame.Show(True) self.app.SetTopWindow(frame) frame.Raise() else: frame = WxImageFrame(None, -1, evt.rgb, **evt.kwargs) frame.Show(True) self.app.SetTopWindow(frame) class WxImageFrame(wx.Frame): ''' WxImageFrame is the primary wxWindows object for displaying SPy images. The frames also handle left double-click events by displaying an x-y plot of the spectrum for the associated pixel. ''' def __init__(self, parent, index, rgb, **kwargs): if 'title' in kwargs: title = kwargs['title'] else: title = 'SPy Image' # wxFrame.__init__(self, parent, index, "SPy Frame") # wxScrolledWindow.__init__(self, parent, index, style = wxSUNKEN_BORDER) img = wx.EmptyImage(rgb.shape[0], rgb.shape[1]) img = wx.EmptyImage(rgb.shape[1], rgb.shape[0]) img.SetData(rgb.tostring()) self.bmp = img.ConvertToBitmap() self.kwargs = kwargs wx.Frame.__init__(self, parent, index, title, wx.DefaultPosition) self.SetClientSizeWH(self.bmp.GetWidth(), self.bmp.GetHeight()) EVT_PAINT(self, self.on_paint) EVT_LEFT_DCLICK(self, self.left_double_click) def on_paint(self, e): dc = wx.PaintDC(self) self.paint(dc) def paint(self, dc): # mDC = wxMemoryDC() # mDC.SelectObject(bmp) # mDC.DrawBitmap(bmp, 0, 0) dc.BeginDrawing() dc.DrawBitmap(self.bmp, 0, 0) # dc.Blit(0,0, bmp.GetWidth(), bmp.GetHeight(), mDC, 0, 0) dc.EndDrawing() def left_double_click(self, evt): logger.debug('LEFT DOUBLE-CLICK at {}'.format((evt.m_y, evt.m_x))) from spectral import settings if "data source" in self.kwargs: settings.plotter.plot(self.kwargs["data source"][evt.m_y, evt.m_x], source=self.kwargs["data source"]) class WxImageServer(wx.App): ''' An image server built on wxPython. This image server runs in a separate thread, displaying raster images and handling events related to the images. DO NOT construct a WxImageServer object directly. Call StartWxImageServer instead. 
''' def OnInit(self): catcher = HiddenCatcher() catcher.app = self #self.SetTopWindow(catcher) self.catcher = catcher return True spectral-0.22.4/spectral/image.py000066400000000000000000000153151412674721200167000ustar00rootroot00000000000000''' Generic functions for handling spectral images. ''' from __future__ import absolute_import, division, print_function, unicode_literals import numbers import numpy as np from .spectral import BandInfo class Image(object): '''spectral.Image is the common base class for spectral image objects.''' def __init__(self, params, metadata=None): self.bands = BandInfo() self.set_params(params, metadata) def set_params(self, params, metadata): try: self.nbands = params.nbands self.nrows = params.nrows self.ncols = params.ncols self.dtype = params.dtype if not metadata: self.metadata = {} else: self.metadata = metadata except: raise def params(self): '''Return an object containing the SpyFile parameters.''' class P: pass p = P() p.nbands = self.nbands p.nrows = self.nrows p.ncols = self.ncols p.metadata = self.metadata p.dtype = self.dtype return p def __repr__(self): return self.__str__() class ImageArray(np.ndarray, Image): '''ImageArray is an interface to an image loaded entirely into memory. ImageArray objects are returned by :meth:`spectral.SpyFile.load`. This class inherits from both numpy.ndarray and Image, providing the interfaces of both classes. ''' format = 'f' # Use 4-byte floats for data arrays def __new__(subclass, data, spyfile): obj = np.asarray(data).view(subclass) ImageArray.__init__(obj, data, spyfile) return obj def __init__(self, data, spyfile): # Add param data to Image initializer params = spyfile.params() params.dtype = data.dtype params.swap = 0 Image.__init__(self, params, spyfile.metadata) self.bands = spyfile.bands self.filename = spyfile.filename self.interleave = 2 # bip def __repr__(self): lst = np.array2string(np.asarray(self), prefix="ImageArray(") return "{}({}, dtype={})".format('ImageArray', lst, self.dtype.name) def __getitem__(self, args): # Duplicate the indexing behavior of SpyFile. If args is iterable # with length greater than one, and if not all of the args are # scalars, then the scalars need to be replaced with slices. try: iterator = iter(args) except TypeError: if isinstance(args, numbers.Number): if args == -1: updated_args = slice(args, None) else: updated_args = slice(args, args+1) else: updated_args = args return self._parent_getitem(updated_args) keep_original_args = True updated_args = [] for arg in iterator: if isinstance(arg, numbers.Number): if arg == -1: updated_args.append(slice(arg, None)) else: updated_args.append(slice(arg, arg+1)) elif isinstance(arg, np.bool_): updated_args.append(arg) else: updated_args.append(arg) keep_original_args = False if keep_original_args: updated_args = args else: updated_args = tuple(updated_args) return self._parent_getitem(updated_args) def _parent_getitem(self, args): return np.ndarray.__getitem__(self, args) def read_band(self, i): ''' For compatibility with SpyFile objects. Returns arr[:,:,i].squeeze() ''' return np.asarray(self[:, :, i].squeeze()) def read_bands(self, bands): '''For SpyFile compatibility. Equivlalent to arr.take(bands, 2)''' return np.asarray(self.take(bands, 2)) def read_pixel(self, row, col): '''For SpyFile compatibility. Equivlalent to arr[row, col]''' return np.asarray(self[row, col]) def read_subregion(self, row_bounds, col_bounds, bands=None): ''' For SpyFile compatibility. 
Equivalent to arr[slice(*row_bounds), slice(*col_bounds), bands], selecting all bands if none are specified. ''' if bands: return np.asarray(self[slice(*row_bounds), slice(*col_bounds), bands]) else: return np.asarray(self[slice(*row_bounds), slice(*col_bounds)]) def read_subimage(self, rows, cols, bands=None): ''' For SpyFile compatibility. Equivalent to arr[rows][:, cols][:, :, bands], selecting all bands if none are specified. ''' if bands: return np.asarray(self[rows][:, cols][:, :, bands]) else: return np.asarray(self[rows][:, cols]) def read_datum(self, i, j, k): '''For SpyFile compatibility. Equivlalent to arr[i, j, k]''' return np.asscalar(self[i, j, k]) def load(self): '''For compatibility with SpyFile objects. Returns self''' return self def asarray(self, writable=False): '''Returns an object with a standard numpy array interface. The return value is the same as calling `numpy.asarray`, except that the array is not writable by default to match the behavior of `SpyFile.asarray`. This function is for compatibility with SpyFile objects. Keyword Arguments: `writable` (bool, default False): If `writable` is True, modifying values in the returned array will result in corresponding modification to the ImageArray object. ''' arr = np.asarray(self) if not writable: arr.setflags(write=False) return arr def info(self): s = '\t# Rows: %6d\n' % (self.nrows) s += '\t# Samples: %6d\n' % (self.ncols) s += '\t# Bands: %6d\n' % (self.shape[2]) s += '\tData format: %8s' % self.dtype.name return s def __array_wrap__(self, out_arr, context=None): # The ndarray __array_wrap__ causes ufunc results to be of type # ImageArray. Instead, return a plain ndarray. return out_arr # Some methods do not call __array_wrap__ and will return an ImageArray. # Currently, these need to be overridden individually or with # __getattribute__ magic. def __getattribute__(self, name): if ((name in np.ndarray.__dict__) and (name not in ImageArray.__dict__)): return getattr(np.asarray(self), name) return super(ImageArray, self).__getattribute__(name) spectral-0.22.4/spectral/io/000077500000000000000000000000001412674721200156465ustar00rootroot00000000000000spectral-0.22.4/spectral/io/__init__.py000066400000000000000000000002661412674721200177630ustar00rootroot00000000000000from __future__ import absolute_import, division, print_function, unicode_literals from .spyfile import SpyFile from ..io import aviris from ..io import erdas from ..io import envi spectral-0.22.4/spectral/io/aviris.py000066400000000000000000000043511412674721200175200ustar00rootroot00000000000000''' Functions for handling AVIRIS image files. ''' from __future__ import absolute_import, division, print_function, unicode_literals import numpy as np import glob import os import spectral as spy from ..spectral import BandInfo from ..utilities.python23 import IS_PYTHON3 from .bipfile import BipFile from .spyfile import find_file_path, InvalidFileError if IS_PYTHON3: import builtins else: import __builtin__ as builtins def open(file, band_file=None): ''' Returns a SpyFile object for an AVIRIS image file. Arguments: `file` (str): Name of the AVIRIS data file. `band_file` (str): Optional name of the AVIRIS spectral calibration file. Returns: A SpyFile object for the image file. 
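    Example (an illustrative sketch; the file names below are placeholders
    for a local AVIRIS scene and its spectral calibration file)::

        >>> import spectral.io.aviris as aviris
        >>> img = aviris.open('scene01_img', 'scene01.spc')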
Raises: spectral.io.spyfile.InvalidFileError ''' class Params: pass p = Params() p.filename = find_file_path(file) p.nbands = 224 p.ncols = 614 fileSize = os.stat(p.filename)[6] if fileSize % 275072 != 0: raise InvalidFileError('File size not consistent with AVIRIS format.') p.nrows = int(fileSize / 275072) p.byte_order = 1 p.dtype = np.dtype('i2').str if spy.byte_order != 1: p.dtype = np.dtype(p.dtype).newbyteorder().str metadata = {'default bands': ['29', '18', '8']} p.offset = 0 img = BipFile(p, metadata) img.scale_factor = 10000.0 if band_file: img.bands = read_aviris_bands(find_file_path(band_file)) return img def read_aviris_bands(cal_filename): ''' Returns a BandInfo object for an AVIRIS spectral calibration file. Arguments: `cal_filename` (str): Name of the AVIRIS spectral calibration file. Returns: A :class:`spectral.BandInfo` object ''' bands = BandInfo() bands.band_quantity = 'Wavelength' bands.band_unit = 'nm' fin = builtins.open(find_file_path(cal_filename)) rows = [line.split() for line in fin] rows = [[float(x) for x in row] for row in rows if len(row) == 5] columns = list(zip(*rows)) bands.centers = columns[0] bands.bandwidths = columns[1] bands.center_stdevs = columns[2] bands.bandwidth_stdevs = columns[3] bands.band_unit = 'nm' return bands spectral-0.22.4/spectral/io/bilfile.py000066400000000000000000000312021412674721200176240ustar00rootroot00000000000000''' Code for handling files that are band interleaved by line (BIL). ''' from __future__ import absolute_import, division, print_function, unicode_literals import array import logging import numpy as np import os import sys import spectral as spy from ..utilities.python23 import typecode, tobytes, frombytes from .spyfile import SpyFile, MemmapFile byte_typecode = typecode('b') class BilFile(SpyFile, MemmapFile): ''' A class to represent image files stored with bands interleaved by line. ''' def __init__(self, params, metadata=None): self.interleave = spy.BIL if metadata is None: metadata = {} SpyFile.__init__(self, params, metadata) self._memmap = self._open_memmap('r') def _open_memmap(self, mode): logger = logging.getLogger('spectral') if (os.path.getsize(self.filename) < sys.maxsize): try: (R, C, B) = self.shape return np.memmap(self.filename, dtype=self.dtype, mode=mode, offset=self.offset, shape=(R, B, C)) except: logger.debug('Unable to create memmap interface.') return None else: return None def read_band(self, band, use_memmap=True): '''Reads a single band from the image. Arguments: `band` (int): Index of band to read. `use_memmap` (bool, default True): Specifies whether the file's memmap interface should be used to read the data. Setting this arg to True only has an effect if a memmap is being used (i.e., if `img.using_memmap` is True). Returns: :class:`numpy.ndarray` An `MxN` array of values for the specified band. ''' if self._memmap is not None and use_memmap is True: data = np.array(self._memmap[:, band, :]) if self.scale_factor != 1: data = data / float(self.scale_factor) return data vals = array.array(byte_typecode) offset = self.offset + band * self.sample_size * self.ncols f = self.fid # Pixel format is BIL, so read an entire line at time. 
for i in range(self.nrows): f.seek(offset + i * self.sample_size * self.nbands * self.ncols, 0) vals.fromfile(f, self.ncols * self.sample_size) arr = np.frombuffer(tobytes(vals), dtype=self.dtype) arr = arr.reshape((self.nrows, self.ncols)) if self.scale_factor != 1: return arr / float(self.scale_factor) return arr def read_bands(self, bands, use_memmap=True): '''Reads multiple bands from the image. Arguments: `bands` (list of ints): Indices of bands to read. `use_memmap` (bool, default True): Specifies whether the file's memmap interface should be used to read the data. Setting this arg to True only has an effect if a memmap is being used (i.e., if `img.using_memmap` is True). Returns: :class:`numpy.ndarray` An `MxNxL` array of values for the specified bands. `M` and `N` are the number of rows & columns in the image and `L` equals len(`bands`). ''' if self._memmap is not None and use_memmap is True: data = np.array(self._memmap[:, bands, :]).transpose((0, 2, 1)) if self.scale_factor != 1: data = data / float(self.scale_factor) return data f = self.fid arr = np.empty((self.nrows, self.ncols, len(bands)), self.dtype) for i in range(self.nrows): vals = array.array(byte_typecode) row_offset = self.offset + i * (self.sample_size * self.nbands * self.ncols) # Pixel format is BIL, so read an entire line at a time. for j in range(len(bands)): f.seek(row_offset + bands[j] * self.sample_size * self.ncols, 0) vals.fromfile(f, self.ncols * self.sample_size) frame = np.frombuffer(tobytes(vals), dtype=self.dtype) arr[i, :, :] = frame.reshape((len(bands), self.ncols)).transpose() if self.scale_factor != 1: return arr / float(self.scale_factor) return arr def read_pixel(self, row, col, use_memmap=True): '''Reads the pixel at position (row,col) from the file. Arguments: `row`, `col` (int): Indices of the row & column for the pixel `use_memmap` (bool, default True): Specifies whether the file's memmap interface should be used to read the data. Setting this arg to True only has an effect if a memmap is being used (i.e., if `img.using_memmap` is True). Returns: :class:`numpy.ndarray` A length-`B` array, where `B` is the number of image bands. ''' if self._memmap is not None and use_memmap is True: data = np.array(self._memmap[row, :, col]) if self.scale_factor != 1: data = data / float(self.scale_factor) return data vals = array.array(byte_typecode) delta = self.sample_size * (self.nbands - 1) offset = self.offset + row * self.nbands * self.ncols \ * self.sample_size + col * self.sample_size f = self.fid ncols = self.ncols sample_size = self.sample_size for i in range(self.nbands): f.seek(offset + i * sample_size * ncols, 0) vals.fromfile(f, sample_size) pixel = np.frombuffer(tobytes(vals), dtype=self.dtype) if self.scale_factor != 1: return pixel / float(self.scale_factor) return pixel def read_subregion(self, row_bounds, col_bounds, bands=None, use_memmap=True): ''' Reads a contiguous rectangular sub-region from the image. Arguments: `row_bounds` (2-tuple of ints): (a, b) -> Rows a through b-1 will be read. `col_bounds` (2-tuple of ints): (a, b) -> Columnss a through b-1 will be read. `bands` (list of ints): Optional list of bands to read. If not specified, all bands are read. `use_memmap` (bool, default True): Specifies whether the file's memmap interface should be used to read the data. Setting this arg to True only has an effect if a memmap is being used (i.e., if `img.using_memmap` is True). Returns: :class:`numpy.ndarray` An `MxNxL` array. 
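        Example (an illustrative sketch; assumes `img` is an open `BilFile`)::

            >>> # Rows 10-19, columns 20-39, first three bands
            >>> region = img.read_subregion((10, 20), (20, 40), bands=[0, 1, 2])
            >>> region.shape
            (10, 20, 3)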
''' if self._memmap is not None and use_memmap is True: if bands is None: data = np.array(self._memmap[row_bounds[0]: row_bounds[1], :, col_bounds[0]: col_bounds[1]]) else: data = np.array(self._memmap[row_bounds[0]: row_bounds[1], bands, col_bounds[0]: col_bounds[1]]) data = data.transpose((0, 2, 1)) if self.scale_factor != 1: data = data / float(self.scale_factor) return data nSubRows = row_bounds[1] - row_bounds[0] # Rows in sub-image nSubCols = col_bounds[1] - col_bounds[0] # Cols in sub-image d_row = self.sample_size * self.ncols * self.nbands colStartPos = col_bounds[0] * self.sample_size f = self.fid f.seek(self.offset, 0) # Increments between bands if bands is None: # Read all bands. bands = list(range(self.nbands)) arr = np.empty((nSubRows, nSubCols, len(bands)), self.dtype) offset = self.offset ncols = self.ncols sampleSize = self.sample_size nSubBands = len(bands) # Pixel format is BIL for i in range(row_bounds[0], row_bounds[1]): f.seek(offset + i * d_row + colStartPos, 0) rowPos = f.tell() vals = array.array(byte_typecode) for j in bands: f.seek(rowPos + j * ncols * sampleSize, 0) vals.fromfile(f, nSubCols * sampleSize) subArray = np.frombuffer(tobytes(vals), dtype=self.dtype) subArray = subArray.reshape((nSubBands, nSubCols)) arr[i - row_bounds[0], :, :] = np.transpose(subArray) if self.scale_factor != 1: return arr / float(self.scale_factor) return arr def read_subimage(self, rows, cols, bands=None, use_memmap=False): ''' Reads arbitrary rows, columns, and bands from the image. Arguments: `rows` (list of ints): Indices of rows to read. `cols` (list of ints): Indices of columns to read. `bands` (list of ints): Optional list of bands to read. If not specified, all bands are read. `use_memmap` (bool, default False): Specifies whether the file's memmap interface should be used to read the data. Setting this arg to True only has an effect if a memmap is being used (i.e., if `img.using_memmap` is True). Returns: :class:`numpy.ndarray` An `MxNxL` array, where `M` = len(`rows`), `N` = len(`cols`), and `L` = len(bands) (or # of image bands if `bands` == None). ''' if self._memmap is not None and use_memmap is True: if bands is None: data = np.array(self._memmap.take(rows, 0).take(cols, 2)) else: data = np.array( self._memmap.take(rows, 0).take(bands, 1).take(cols, 2)) data = data.transpose((0, 2, 1)) if self.scale_factor != 1: data = data / float(self.scale_factor) return data nSubRows = len(rows) # Rows in sub-image nSubCols = len(cols) # Cols in sub-image d_col = self.sample_size d_band = d_col * self.ncols d_row = d_band * self.nbands f = self.fid f.seek(self.offset, 0) # Increments between bands if bands is None: # Read all bands. bands = list(range(self.nbands)) nSubBands = len(bands) arr = np.empty((nSubRows, nSubCols, nSubBands), self.dtype) offset = self.offset vals = array.array(byte_typecode) sample_size = self.sample_size # Pixel format is BIL for i in rows: for j in cols: for k in bands: f.seek(offset + i * d_row + j * d_col + k * d_band, 0) vals.fromfile(f, sample_size) subArray = np.frombuffer(tobytes(vals), dtype=self.dtype) subArray = subArray.reshape((nSubRows, nSubCols, nSubBands)) if self.scale_factor != 1: return subArray / float(self.scale_factor) return subArray def read_datum(self, i, j, k, use_memmap=True): '''Reads the band `k` value for pixel at row `i` and column `j`. Arguments: `i`, `j`, `k` (integer): Row, column and band index, respectively. `use_memmap` (bool, default True): Specifies whether the file's memmap interface should be used to read the data. 
Setting this arg to True only has an effect if a memmap is being used (i.e., if `img.using_memmap` is True). Using this function is not an efficient way to iterate over bands or pixels. For such cases, use readBands or readPixel instead. ''' if self._memmap is not None and use_memmap is True: datum = self._memmap[i, k, j] if self.scale_factor != 1: datum /= float(self.scale_factor) return datum d_col = self.sample_size d_band = d_col * self.ncols d_row = d_band * self.nbands self.fid.seek(self.offset + i * d_row + j * d_col + k * d_band, 0) vals = array.array(byte_typecode) vals.fromfile(self.fid, self.sample_size) arr = np.frombuffer(tobytes(vals), dtype=self.dtype) return arr.tolist()[0] / float(self.scale_factor) spectral-0.22.4/spectral/io/bipfile.py000066400000000000000000000321041412674721200176320ustar00rootroot00000000000000''' Code for handling files that are band interleaved by pixel (BIP). ''' from __future__ import absolute_import, division, print_function, unicode_literals import array import logging import numpy as np import os import sys import spectral as spy from .spyfile import SpyFile, MemmapFile from spectral.utilities.python23 import typecode, tobytes, frombytes byte_typecode = typecode('b') class BipFile(SpyFile, MemmapFile): ''' A class to interface image files stored with bands interleaved by pixel. ''' def __init__(self, params, metadata=None): self.interleave = spy.BIP if metadata is None: metadata = {} SpyFile.__init__(self, params, metadata) self._memmap = self._open_memmap('r') def _open_memmap(self, mode): logger = logging.getLogger('spectral') if (os.path.getsize(self.filename) < sys.maxsize): try: (R, C, B) = self.shape return np.memmap(self.filename, dtype=self.dtype, mode=mode, offset=self.offset, shape=self.shape) except: logger.debug('Unable to create memmap interface.') return None else: return None def read_band(self, band, use_memmap=True): '''Reads a single band from the image. Arguments: `band` (int): Index of band to read. `use_memmap` (bool, default True): Specifies whether the file's memmap interface should be used to read the data. Setting this arg to True only has an effect if a memmap is being used (i.e., if `img.using_memmap` is True). Returns: :class:`numpy.ndarray` An `MxN` array of values for the specified band. ''' if self._memmap is not None and use_memmap is True: data = np.array(self._memmap[:, :, band]) if self.scale_factor != 1: data = data / float(self.scale_factor) return data vals = array.array(byte_typecode) delta = self.sample_size * (self.nbands - 1) nVals = self.nrows * self.ncols sample_size = self.sample_size f = self.fid f.seek(self.offset + self.sample_size * band, 0) # Pixel format is BIP for i in range(nVals - 1): vals.fromfile(f, sample_size) f.seek(delta, 1) vals.fromfile(f, sample_size) arr = np.frombuffer(tobytes(vals), dtype=self.dtype) arr = arr.reshape(self.nrows, self.ncols) if self.scale_factor != 1: return arr / float(self.scale_factor) return arr def read_bands(self, bands, use_memmap=True): '''Reads multiple bands from the image. Arguments: `bands` (list of ints): Indices of bands to read. `use_memmap` (bool, default True): Specifies whether the file's memmap interface should be used to read the data. Setting this arg to True only has an effect if a memmap is being used (i.e., if `img.using_memmap` is True). Returns: :class:`numpy.ndarray` An `MxNxL` array of values for the specified bands. `M` and `N` are the number of rows & columns in the image and `L` equals len(`bands`). 
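        Example (an illustrative sketch; assumes `img` is an open `BipFile`)::

            >>> # Read three bands into an MxNx3 array
            >>> data = img.read_bands([29, 19, 9])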
''' if self._memmap is not None and use_memmap is True: data = np.array(self._memmap[:, :, bands]) if self.scale_factor != 1: data = data / float(self.scale_factor) return data vals = array.array(byte_typecode) offset = self.offset delta = self.sample_size * self.nbands nVals = self.nrows * self.ncols sample_size = self.sample_size # Increments between bands delta_b = list(bands[:]) for i in range(len(delta_b)): delta_b[i] *= self.sample_size f = self.fid # Pixel format is BIP for i in range(nVals): pixelOffset = offset + i * delta for j in range(len(bands)): f.seek(pixelOffset + delta_b[j], 0) # Next band vals.fromfile(f, sample_size) arr = np.frombuffer(tobytes(vals), dtype=self.dtype) arr = arr.reshape(self.nrows, self.ncols, len(bands)) if self.scale_factor != 1: return arr / float(self.scale_factor) return arr def read_pixel(self, row, col, use_memmap=True): '''Reads the pixel at position (row,col) from the file. Arguments: `row`, `col` (int): Indices of the row & column for the pixel `use_memmap` (bool, default True): Specifies whether the file's memmap interface should be used to read the data. Setting this arg to True only has an effect if a memmap is being used (i.e., if `img.using_memmap` is True). Returns: :class:`numpy.ndarray` A length-`B` array, where `B` is the number of image bands. ''' if self._memmap is not None and use_memmap is True: data = np.array(self._memmap[row, col, :]) if self.scale_factor != 1: data = data / float(self.scale_factor) return data vals = array.array(byte_typecode) f = self.fid f.seek(self.offset + self.sample_size * self.nbands * (row * self.ncols + col), 0) # Pixel format is BIP so read entire pixel. vals.fromfile(f, self.nbands * self.sample_size) pixel = np.frombuffer(tobytes(vals), dtype=self.dtype) if self.scale_factor != 1: return pixel / float(self.scale_factor) return pixel def read_subregion(self, row_bounds, col_bounds, bands=None, use_memmap=True): ''' Reads a contiguous rectangular sub-region from the image. Arguments: `row_bounds` (2-tuple of ints): (a, b) -> Rows a through b-1 will be read. `col_bounds` (2-tuple of ints): (a, b) -> Columnss a through b-1 will be read. `bands` (list of ints): Optional list of bands to read. If not specified, all bands are read. `use_memmap` (bool, default True): Specifies whether the file's memmap interface should be used to read the data. Setting this arg to True only has an effect if a memmap is being used (i.e., if `img.using_memmap` is True). Returns: :class:`numpy.ndarray` An `MxNxL` array. 
''' if self._memmap is not None and use_memmap is True: if bands is None: data = np.array(self._memmap[row_bounds[0]: row_bounds[1], col_bounds[0]: col_bounds[1], :]) else: data = np.array(self._memmap[row_bounds[0]: row_bounds[1], col_bounds[0]: col_bounds[1], bands]) if self.scale_factor != 1: data = data / float(self.scale_factor) return data offset = self.offset nbands = self.nbands nSubRows = row_bounds[1] - row_bounds[0] # Rows in sub-image nSubCols = col_bounds[1] - col_bounds[0] # Cols in sub-image d_row = self.sample_size * self.ncols * self.nbands colStartPos = col_bounds[0] * self.sample_size * self.nbands vals = array.array(byte_typecode) nVals = self.nrows * self.ncols sample_size = self.sample_size # Increments between bands if bands is not None: allBands = 0 nSubBands = len(bands) delta_b = bands[:] for i in range(len(delta_b)): delta_b[i] *= self.sample_size else: allBands = 1 nSubBands = self.nbands f = self.fid # Pixel format is BIP for i in range(row_bounds[0], row_bounds[1]): f.seek(offset + i * d_row + colStartPos, 0) rowPos = f.tell() if allBands: # This is the simple one vals.fromfile(f, nSubCols * nbands * sample_size) else: # Need to pull out specific bands for each column. for j in range(nSubCols): f.seek(rowPos + j * self.sample_size * self.nbands, 0) pixelPos = f.tell() for k in range(len(bands)): f.seek(pixelPos + delta_b[k], 0) # Next band vals.fromfile(f, sample_size) arr = np.frombuffer(tobytes(vals), dtype=self.dtype) arr = arr.reshape(nSubRows, nSubCols, nSubBands) if self.scale_factor != 1: return arr / float(self.scale_factor) return arr def read_subimage(self, rows, cols, bands=None, use_memmap=False): ''' Reads arbitrary rows, columns, and bands from the image. Arguments: `rows` (list of ints): Indices of rows to read. `cols` (list of ints): Indices of columns to read. `bands` (list of ints): Optional list of bands to read. If not specified, all bands are read. `use_memmap` (bool, default False): Specifies whether the file's memmap interface should be used to read the data. Setting this arg to True only has an effect if a memmap is being used (i.e., if `img.using_memmap` is True). Returns: :class:`numpy.ndarray` An `MxNxL` array, where `M` = len(`rows`), `N` = len(`cols`), and `L` = len(bands) (or # of image bands if `bands` == None). 
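        Example (an illustrative sketch; assumes `img` is an open `BipFile`)::

            >>> # Arbitrary (non-contiguous) rows and columns, two bands
            >>> subset = img.read_subimage([0, 5, 10], [3, 7], bands=[1, 2])
            >>> subset.shape
            (3, 2, 2)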
''' if self._memmap is not None and use_memmap is True: if bands is None: data = np.array(self._memmap.take(rows, 0).take(cols, 1)) else: data = np.array( self._memmap.take(rows, 0).take(cols, 1).take(bands, 2)) if self.scale_factor != 1: data = data / float(self.scale_factor) return data offset = self.offset nbands = self.nbands nSubRows = len(rows) # Rows in sub-image nSubCols = len(cols) # Cols in sub-image d_band = self.sample_size d_col = d_band * self.nbands d_row = d_col * self.ncols vals = array.array(byte_typecode) nVals = self.nrows * self.ncols sample_size = self.sample_size # Increments between bands if bands is not None: allBands = 0 nSubBands = len(bands) else: allBands = 1 bands = list(range(self.nbands)) nSubBands = self.nbands f = self.fid # Pixel format is BIP for i in rows: for j in cols: if allBands: f.seek(offset + i * d_row + j * d_col, 0) vals.fromfile(f, nSubBands * sample_size) else: for k in bands: f.seek(offset + i * d_row + j * d_col + k * d_band, 0) vals.fromfile(f, sample_size) arr = np.frombuffer(tobytes(vals), dtype=self.dtype) arr = arr.reshape(nSubRows, nSubCols, nSubBands) if self.scale_factor != 1: return arr / float(self.scale_factor) return arr def read_datum(self, i, j, k, use_memmap=True): '''Reads the band `k` value for pixel at row `i` and column `j`. Arguments: `i`, `j`, `k` (integer): Row, column and band index, respectively. `use_memmap` (bool, default True): Specifies whether the file's memmap interface should be used to read the data. Setting this arg to True only has an effect if a memmap is being used (i.e., if `img.using_memmap` is True). Using this function is not an efficient way to iterate over bands or pixels. For such cases, use readBands or readPixel instead. ''' if self._memmap is not None and use_memmap is True: datum = self._memmap[i, j, k] if self.scale_factor != 1: datum /= float(self.scale_factor) return datum vals = array.array(byte_typecode) f = self.fid f.seek(self.offset + self.sample_size * (self.nbands * (i * self.ncols + j) + k), 0) # Pixel format is BIP so read entire pixel. vals.fromfile(f, self.sample_size) arr = np.frombuffer(tobytes(vals), dtype=self.dtype) return arr.tolist()[0] / float(self.scale_factor) spectral-0.22.4/spectral/io/bsqfile.py000066400000000000000000000320671412674721200176550ustar00rootroot00000000000000''' Code for handling files that are band sequential (BSQ). ''' from __future__ import absolute_import, division, print_function, unicode_literals import array import logging import numpy as np import os import sys import spectral as spy from ..utilities.python23 import typecode, tobytes, frombytes from .spyfile import SpyFile, MemmapFile byte_typecode = typecode('b') class BsqFile(SpyFile, MemmapFile): ''' A class to represent image files stored with bands sequential. ''' def __init__(self, params, metadata=None): self.interleave = spy.BSQ if metadata is None: metadata = {} SpyFile.__init__(self, params, metadata) self._memmap = self._open_memmap('r') def _open_memmap(self, mode): logger = logging.getLogger('spectral') if (os.path.getsize(self.filename) < sys.maxsize): try: (R, C, B) = self.shape return np.memmap(self.filename, dtype=self.dtype, mode=mode, offset=self.offset, shape=(B, R, C)) except: logger.debug('Unable to create memmap interface.') return None else: return None def read_band(self, band, use_memmap=True): '''Reads a single band from the image. Arguments: `band` (int): Index of band to read. 
`use_memmap` (bool, default True): Specifies whether the file's memmap interface should be used to read the data. Setting this arg to True only has an effect if a memmap is being used (i.e., if `img.using_memmap` is True). Returns: :class:`numpy.ndarray` An `MxN` array of values for the specified band. ''' if self._memmap is not None and use_memmap is True: data = np.array(self._memmap[band, :, :]) if self.scale_factor != 1: data = data / float(self.scale_factor) return data vals = array.array(byte_typecode) offset = self.offset + band * self.sample_size * \ self.nrows * self.ncols f = self.fid # Pixel format is BSQ, so read the whole band at once. f.seek(offset, 0) vals.fromfile(f, self.nrows * self.ncols * self.sample_size) arr = np.frombuffer(tobytes(vals), dtype=self.dtype) arr = arr.reshape(self.nrows, self.ncols) if self.scale_factor != 1: return arr / float(self.scale_factor) return arr def read_bands(self, bands, use_memmap=False): '''Reads multiple bands from the image. Arguments: `bands` (list of ints): Indices of bands to read. `use_memmap` (bool, default False): Specifies whether the file's memmap interface should be used to read the data. Setting this arg to True only has an effect if a memmap is being used (i.e., if `img.using_memmap` is True). Returns: :class:`numpy.ndarray` An `MxNxL` array of values for the specified bands. `M` and `N` are the number of rows & columns in the image and `L` equals len(`bands`). ''' if self._memmap is not None and use_memmap is True: data = np.array(self._memmap[bands, :, :]).transpose((1, 2, 0)) if self.scale_factor != 1: data = data / float(self.scale_factor) return data f = self.fid arr = np.zeros((self.nrows, self.ncols, len(bands)), dtype=self.dtype) for j in range(len(bands)): vals = array.array(byte_typecode) offset = self.offset + (bands[j]) * self.sample_size \ * self.nrows * self.ncols # Pixel format is BSQ, so read an entire band at time. f.seek(offset, 0) vals.fromfile(f, self.nrows * self.ncols * self.sample_size) band = np.frombuffer(tobytes(vals), dtype=self.dtype) arr[:, :, j] = band.reshape(self.nrows, self.ncols) if self.scale_factor != 1: return arr / float(self.scale_factor) return arr def read_pixel(self, row, col, use_memmap=True): '''Reads the pixel at position (row,col) from the file. Arguments: `row`, `col` (int): Indices of the row & column for the pixel `use_memmap` (bool, default True): Specifies whether the file's memmap interface should be used to read the data. Setting this arg to True only has an effect if a memmap is being used (i.e., if `img.using_memmap` is True). Returns: :class:`numpy.ndarray` A length-`B` array, where `B` is the number of image bands. 
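        Example (an illustrative sketch; assumes `img` is an open `BsqFile`)::

            >>> # Full spectrum of the pixel at row 50, column 100
            >>> spectrum = img.read_pixel(50, 100)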
''' if self._memmap is not None and use_memmap is True: data = np.array(self._memmap[:, row, col]) if self.scale_factor != 1: data = data / float(self.scale_factor) return data vals = array.array(byte_typecode) delta = self.sample_size * (self.nbands - 1) offset = self.offset + row * self.nbands * self.ncols \ * self.sample_size + col * self.sample_size f = self.fid nPixels = self.nrows * self.ncols ncols = self.ncols sampleSize = self.sample_size bandSize = sampleSize * nPixels rowSize = sampleSize * self.ncols for i in range(self.nbands): f.seek(self.offset + i * bandSize + row * rowSize + col * sampleSize, 0) vals.fromfile(f, sampleSize) pixel = np.frombuffer(tobytes(vals), dtype=self.dtype) if self.scale_factor != 1: return pixel / float(self.scale_factor) return pixel def read_subregion(self, row_bounds, col_bounds, bands=None, use_memmap=True): ''' Reads a contiguous rectangular sub-region from the image. Arguments: `row_bounds` (2-tuple of ints): (a, b) -> Rows a through b-1 will be read. `col_bounds` (2-tuple of ints): (a, b) -> Columnss a through b-1 will be read. `bands` (list of ints): Optional list of bands to read. If not specified, all bands are read. `use_memmap` (bool, default True): Specifies whether the file's memmap interface should be used to read the data. Setting this arg to True only has an effect if a memmap is being used (i.e., if `img.using_memmap` is True). Returns: :class:`numpy.ndarray` An `MxNxL` array. ''' if self._memmap is not None and use_memmap is True: if bands is None: data = np.array(self._memmap[:, row_bounds[0]: row_bounds[1], col_bounds[0]: col_bounds[1]]) else: data = np.array( self._memmap[bands, row_bounds[0]: row_bounds[1], col_bounds[0]: col_bounds[1]]) data = data.transpose((1, 2, 0)) if self.scale_factor != 1: data = data / float(self.scale_factor) return data nSubRows = row_bounds[1] - row_bounds[0] # Rows in sub-image nSubCols = col_bounds[1] - col_bounds[0] # Cols in sub-image f = self.fid f.seek(self.offset, 0) # Increments between bands if bands is None: # Read all bands. bands = list(range(self.nbands)) arr = np.zeros((nSubRows, nSubCols, len(bands)), dtype=self.dtype) nrows = self.nrows ncols = self.ncols sampleSize = self.sample_size bandSize = nrows * ncols * sampleSize colStartOffset = col_bounds[0] * sampleSize rowSize = ncols * sampleSize rowStartOffset = row_bounds[0] * rowSize nSubBands = len(bands) # Pixel format is BSQ for i in bands: vals = array.array(byte_typecode) bandOffset = i * bandSize for j in range(row_bounds[0], row_bounds[1]): f.seek(self.offset + bandOffset + j * rowSize + colStartOffset, 0) vals.fromfile(f, nSubCols * sampleSize) subArray = np.frombuffer(tobytes(vals), dtype=self.dtype).reshape((nSubRows, nSubCols)) arr[:, :, i] = subArray if self.scale_factor != 1: return arr / float(self.scale_factor) return arr def read_subimage(self, rows, cols, bands=None, use_memmap=False): ''' Reads arbitrary rows, columns, and bands from the image. Arguments: `rows` (list of ints): Indices of rows to read. `cols` (list of ints): Indices of columns to read. `bands` (list of ints): Optional list of bands to read. If not specified, all bands are read. `use_memmap` (bool, default False): Specifies whether the file's memmap interface should be used to read the data. Setting this arg to True only has an effect if a memmap is being used (i.e., if `img.using_memmap` is True). Returns: :class:`numpy.ndarray` An `MxNxL` array, where `M` = len(`rows`), `N` = len(`cols`), and `L` = len(bands) (or # of image bands if `bands` == None). 
''' if self._memmap is not None and use_memmap is True: if bands is None: data = np.array(self._memmap[:].take(rows, 1).take(cols, 2)) else: data = np.array( self._memmap.take(bands, 0).take(rows, 1).take(cols, 2)) data = data.transpose((1, 2, 0)) if self.scale_factor != 1: data = data / float(self.scale_factor) return data nSubRows = len(rows) # Rows in sub-image nSubCols = len(cols) # Cols in sub-image d_col = self.sample_size d_band = d_col * self.ncols d_row = d_band * self.nbands f = self.fid f.seek(self.offset, 0) # Increments between bands if bands is None: # Read all bands. bands = list(range(self.nbands)) nSubBands = len(bands) arr = np.zeros((nSubRows, nSubCols, nSubBands), dtype=self.dtype) offset = self.offset vals = array.array(byte_typecode) nrows = self.nrows ncols = self.ncols sampleSize = self.sample_size bandSize = nrows * ncols * sampleSize sampleSize = self.sample_size rowSize = ncols * sampleSize # Pixel format is BSQ for i in bands: bandOffset = offset + i * bandSize for j in rows: rowOffset = j * rowSize for k in cols: f.seek(bandOffset + rowOffset + k * sampleSize, 0) vals.fromfile(f, sampleSize) arr = np.frombuffer(tobytes(vals), dtype=self.dtype) arr = arr.reshape(nSubBands, nSubRows, nSubCols) arr = np.transpose(arr, (1, 2, 0)) if self.scale_factor != 1: return arr / float(self.scale_factor) return arr def read_datum(self, i, j, k, use_memmap=True): '''Reads the band `k` value for pixel at row `i` and column `j`. Arguments: `i`, `j`, `k` (integer): Row, column and band index, respectively. `use_memmap` (bool, default True): Specifies whether the file's memmap interface should be used to read the data. Setting this arg to True only has an effect if a memmap is being used (i.e., if `img.using_memmap` is True). Using this function is not an efficient way to iterate over bands or pixels. For such cases, use readBands or readPixel instead. ''' if self._memmap is not None and use_memmap is True: datum = self._memmap[k, i, j] if self.scale_factor != 1: datum /= float(self.scale_factor) return datum nrows = self.nrows ncols = self.ncols sampleSize = self.sample_size self.fid.seek(self.offset + (k * nrows * ncols + i * ncols + j) * sampleSize, 0) vals = array.array(byte_typecode) vals.fromfile(self.fid, sampleSize) arr = np.frombuffer(tobytes(vals), dtype=self.dtype) return arr.tolist()[0] / float(self.scale_factor) spectral-0.22.4/spectral/io/envi.py000066400000000000000000001072701412674721200171700ustar00rootroot00000000000000''' ENVI [#envi-trademark]_ is a popular commercial software package for processing and analyzing geospatial imagery. SPy supports reading imagery with associated ENVI header files and reading & writing spectral libraries with ENVI headers. ENVI files are opened automatically by the SPy :func:`~spectral.image` function but can also be called explicitly. It may be necessary to open an ENVI file explicitly if the data file is in a separate directory from the header or if the data file has an unusual file extension that SPy can not identify. >>> import spectral.io.envi as envi >>> img = envi.open('cup95eff.int.hdr', '/Users/thomas/spectral_data/cup95eff.int') .. [#envi-trademark] ENVI is a registered trademark of Exelis, Inc. 
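Writing data back out in ENVI format is handled by
:func:`~spectral.io.envi.save_image`. A minimal, illustrative sketch (the
output file name is a placeholder and `img` is the image opened above)::

    >>> import numpy as np
    >>> envi.save_image('output.hdr', img, dtype=np.float32, force=True)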
''' from __future__ import absolute_import, division, print_function, unicode_literals import logging import numpy as np import os import sys import warnings import spectral as spy from ..spectral import BandInfo from ..utilities.python23 import IS_PYTHON3, is_string from ..utilities.errors import SpyException from .bilfile import BilFile from .bipfile import BipFile from .bsqfile import BsqFile from .spyfile import (FileNotFoundError, find_file_path, interleave_transpose, InvalidFileError, SpyFile) if IS_PYTHON3: import builtins else: import __builtin__ as builtins logger = logging.getLogger('spectral') # Known ENVI data file extensions. Upper and lower case versions will be # recognized, as well as interleaves ('bil', 'bip', 'bsq'), and no extension. KNOWN_EXTS = ['img', 'dat', 'sli', 'hyspex', 'raw'] dtype_map = [('1', np.uint8), # unsigned byte ('2', np.int16), # 16-bit int ('3', np.int32), # 32-bit int ('4', np.float32), # 32-bit float ('5', np.float64), # 64-bit float ('6', np.complex64), # 2x32-bit complex ('9', np.complex128), # 2x64-bit complex ('12', np.uint16), # 16-bit unsigned int ('13', np.uint32), # 32-bit unsigned int ('14', np.int64), # 64-bit int ('15', np.uint64)] # 64-bit unsigned int envi_to_dtype = dict((k, np.dtype(v).char) for (k, v) in dtype_map) dtype_to_envi = dict(tuple(reversed(item)) for item in list(envi_to_dtype.items())) class EnviException(SpyException): '''Base class for ENVI file-related exceptions.''' pass class EnviDataTypeError(EnviException, TypeError): '''Raised when saving invalid image data type to ENVI format. ''' def __init__(self, dtype): msg = 'Image data type "{0}" can not be saved to ENVI data file. ' \ 'Call spectral.envi.get_supported_dtypes for a list of supported ' \ 'data type names.'.format(np.dtype(dtype).name) super(EnviDataTypeError, self).__init__(msg) class EnviFeatureNotSupported(EnviException, NotImplementedError): '''A specified ENVI capability is not supported by the spectral module.''' pass class FileNotAnEnviHeader(EnviException, InvalidFileError): '''Raised when "ENVI" does not appear on the first line of the file.''' def __init__(self, msg): super(FileNotAnEnviHeader, self).__init__(msg) class MissingEnviHeaderParameter(EnviException): '''Raised when a mandatory header parameter is missing.''' def __init__(self, param): msg = 'Mandatory parameter "%s" missing from header file.' % param super(MissingEnviHeaderParameter, self).__init__(msg) class EnviHeaderParsingError(EnviException, InvalidFileError): '''Raised upon failure to parse parameter/value pairs from a file.''' def __init__(self): msg = 'Failed to parse ENVI header file.' super(EnviHeaderParsingError, self).__init__(msg) class EnviDataFileNotFoundError(EnviException, FileNotFoundError): '''Raised when data file associated with a header is not found.''' pass def _validate_dtype(dtype): '''Raises EnviDataTypeError if dtype can not be written to ENVI file.''' typename = np.dtype(dtype).name if typename not in [np.dtype(t).name for t in list(dtype_to_envi.keys())]: raise EnviDataTypeError(dtype) def get_supported_dtypes(): '''Returns list of names of image data types supported by ENVI format.''' return [np.dtype(t).name for t in list(dtype_to_envi.keys())] def read_envi_header(file): ''' USAGE: hdr = read_envi_header(file) Reads an ENVI ".hdr" file header and returns the parameters in a dictionary as strings. Header field names are treated as case insensitive and all keys in the dictionary are lowercase. 
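    Example (an illustrative sketch; the header file name is a placeholder
    for a local ENVI header file)::

        >>> h = read_envi_header('cup95eff.int.hdr')
        >>> nbands = int(h['bands'])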
''' f = builtins.open(file, 'r') try: starts_with_ENVI = f.readline().strip().startswith('ENVI') except UnicodeDecodeError: msg = 'File does not appear to be an ENVI header (appears to be a ' \ 'binary file).' f.close() raise FileNotAnEnviHeader(msg) else: if not starts_with_ENVI: msg = 'File does not appear to be an ENVI header (missing "ENVI" \ at beginning of first line).' f.close() raise FileNotAnEnviHeader(msg) lines = f.readlines() f.close() dict = {} have_nonlowercase_param = False support_nonlowercase_params = spy.settings.envi_support_nonlowercase_params try: while lines: line = lines.pop(0) if line.find('=') == -1: continue if line[0] == ';': continue (key, sep, val) = line.partition('=') key = key.strip() if not key.islower(): have_nonlowercase_param = True if not support_nonlowercase_params: key = key.lower() val = val.strip() if val and val[0] == '{': str = val.strip() while str[-1] != '}': line = lines.pop(0) if line[0] == ';': continue str += '\n' + line.strip() if key == 'description': dict[key] = str.strip('{}').strip() else: vals = str[1:-1].split(',') for j in range(len(vals)): vals[j] = vals[j].strip() dict[key] = vals else: dict[key] = val if have_nonlowercase_param and not support_nonlowercase_params: msg = 'Parameters with non-lowercase names encountered ' \ 'and converted to lowercase. To retain source file ' \ 'parameter name capitalization, set ' \ 'spectral.settings.envi_support_nonlowercase_params to ' \ 'True.' warnings.warn(msg) logger.debug('ENVI header parameter names converted to lower case.') return dict except: raise EnviHeaderParsingError() def gen_params(envi_header): ''' Parse an envi_header to a `Params` object. Arguments: `envi_header` (dict or file_name): A dict or an `.hdr` file name ''' if not isinstance(envi_header, dict): headerPath = find_file_path(envi_header) h = read_envi_header(headerPath) else: h = envi_header class Params: pass p = Params() p.nbands = int(h["bands"]) p.nrows = int(h["lines"]) p.ncols = int(h["samples"]) p.offset = int(h["header offset"]) if "header offset" in h else int(0) p.byte_order = int(h["byte order"]) p.dtype = np.dtype(envi_to_dtype[str(h["data type"])]).str if p.byte_order != spy.byte_order: p.dtype = np.dtype(p.dtype).newbyteorder().str p.filename = None return p def _has_frame_offset(params): ''' Returns True if header params indicate non-zero frame offsets. Arguments: `params` (dict): Dictionary of header parameters assocaited with hdr file. Returns: bool This function returns True when either "major frame offsets" or "minor frame offsets" is specified and contains a non-zero value. ''' for param in ['major frame offsets', 'minor frame offsets']: if param in params: val = params[param] if np.iterable(val): offsets = [int(x) for x in val] else: offsets = [int(val)] * 2 if not np.all(np.equal(offsets, 0)): return True return False def check_compatibility(header): ''' Verifies that all features of an ENVI header are supported. ''' if is_string(header): header = read_envi_header(find_file_path(header)) mandatory_params = ['lines', 'samples', 'bands', 'data type', 'interleave', 'byte order'] for p in mandatory_params: if p not in header: raise MissingEnviHeaderParameter(p) if _has_frame_offset(header): raise EnviFeatureNotSupported( 'ENVI image frame offsets are not supported.') def open(file, image=None): ''' Opens an image or spectral library with an associated ENVI HDR header file. Arguments: `file` (str): Name of the header file for the image. `image` (str): Optional name of the associated image data file. 
Returns: :class:`spectral.SpyFile` or :class:`spectral.io.envi.SpectralLibrary` object. Raises: TypeError, EnviDataFileNotFoundError If the specified file is not found in the current directory, all directories listed in the SPECTRAL_DATA environment variable will be searched until the file is found. Based on the name of the header file, this function will search for the image file in the same directory as the header, looking for a file with the same name as the header but different extension. Extensions recognized are .img, .dat, .sli, and no extension. Capitalized versions of the file extensions are also searched. ''' header_path = find_file_path(file) h = read_envi_header(header_path) check_compatibility(h) p = gen_params(h) inter = h["interleave"] # Validate image file name if not image: # Try to determine the name of the image file (header_path_title, header_ext) = os.path.splitext(header_path) if header_ext.lower() == '.hdr': exts = [ext.lower() for ext in KNOWN_EXTS] + [inter.lower()] exts = [''] + exts + [ext.upper() for ext in exts] for ext in exts: if len(ext) == 0: testname = header_path_title else: testname = header_path_title + '.' + ext if os.path.isfile(testname): image = testname break if not image: msg = 'Unable to determine the ENVI data file name for the ' \ 'given header file. You can specify the data file by passing ' \ 'its name as the optional `image` argument to envi.open.' raise EnviDataFileNotFoundError(msg) else: image = find_file_path(image) p.filename = image if h.get('file type') == 'ENVI Spectral Library': # File is a spectral library data = np.fromfile(p.filename, p.dtype, p.ncols * p.nrows) data.shape = (p.nrows, p.ncols) return SpectralLibrary(data, h, p) # Create the appropriate object type for the interleave format. inter = h["interleave"] if inter == 'bil' or inter == 'BIL': img = BilFile(p, h) elif inter == 'bip' or inter == 'BIP': img = BipFile(p, h) else: img = BsqFile(p, h) img.scale_factor = float(h.get('reflectance scale factor', 1.0)) # Add band info if 'wavelength' in h: try: img.bands.centers = [float(b) for b in h['wavelength']] except: pass if 'fwhm' in h: try: img.bands.bandwidths = [float(f) for f in h['fwhm']] except: pass img.bands.band_unit = h.get('wavelength units', None) if 'bbl' in h: try: h['bbl'] = [int(float(b)) for b in h['bbl']] except: logger.warning('Unable to parse bad band list (bbl) in ENVI ' \ 'header as integers.') return img def check_new_filename(hdr_file, img_ext, force): '''Raises an exception if the associated header or image file names exist. ''' if img_ext is None: img_ext = '' elif len(img_ext) > 0 and img_ext[0] != '.': img_ext = '.' + img_ext hdr_file = os.path.realpath(hdr_file) (base, ext) = os.path.splitext(hdr_file) if ext.lower() != '.hdr': raise EnviException('Header file name must end in ".hdr" or ".HDR".') image_file = base + img_ext if not force: if os.path.isfile(hdr_file): raise EnviException('Header file %s already exists. Use `force` ' 'keyword to force overwrite.' % hdr_file) if os.path.isfile(image_file): raise EnviException('Image file %s already exists. Use `force` ' 'keyword to force overwrite.' % image_file) return (hdr_file, image_file) def save_image(hdr_file, image, **kwargs): ''' Saves an image to disk. Arguments: `hdr_file` (str): Header file (with ".hdr" extension) name with path. `image` (SpyFile object or numpy.ndarray): The image to save. Keyword Arguments: `dtype` (numpy dtype or type string): The numpy data type with which to store the image. 
For example, to store the image in 16-bit unsigned integer format, the argument could be any of `numpy.uint16`, "u2", "uint16", or "H". `force` (bool): If the associated image file or header already exist and `force` is True, the files will be overwritten; otherwise, if either of the files exist, an exception will be raised. `ext` (str or None): The extension to use for the image file. If not specified, the default extension ".img" will be used. If `ext` is an empty string or is None, the image file will have the same name as the header but without the ".hdr" extension. `interleave` (str): The band interleave format to use in the file. This argument should be one of "bil", "bip", or "bsq". If not specified, the image will be written in BIP interleave. `byteorder` (int or string): Specifies the byte order (endian-ness) of the data as written to disk. For little endian, this value should be either 0 or "little". For big endian, it should be either 1 or "big". If not specified, native byte order will be used. `metadata` (dict): A dict containing ENVI header parameters (e.g., parameters extracted from a source image). Example:: >>> # Save the first 10 principal components of an image >>> data = open_image('92AV3C.lan').load() >>> pc = principal_components(data) >>> pcdata = pc.reduce(num=10).transform(data) >>> envi.save_image('pcimage.hdr', pcdata, dtype=np.float32) If the source image being saved was already in ENVI format, then the SpyFile object for that image will contain a `metadata` dict that can be passed as the `metadata` keyword. However, care should be taken to ensure that all the metadata fields from the source image are still accurate (e.g., band names or wavelengths will no longer be correct if the data being saved are from a principal components transformation). ''' data, metadata = _prepared_data_and_metadata(hdr_file, image, **kwargs) metadata['file type'] = "ENVI Standard" _write_image(hdr_file, data, metadata, **kwargs) def save_classification(hdr_file, image, **kwargs): '''Saves a classification image to disk. Arguments: `hdr_file` (str): Header file (with ".hdr" extension) name with path. `image` (SpyFile object or numpy.ndarray): The image to save. Keyword Arguments: `dtype` (numpy dtype or type string): The numpy data type with which to store the image. For example, to store the image in 16-bit unsigned integer format, the argument could be any of `numpy.uint16`, "u2", "uint16", or "H". `force` (bool): If the associated image file or header already exist and `force` is True, the files will be overwritten; otherwise, if either of the files exist, an exception will be raised. `ext` (str): The extension to use for the image file. If not specified, the default extension ".img" will be used. If `ext` is an empty string, the image file will have the same name as the header but without the ".hdr" extension. `interleave` (str): The band interleave format to use in the file. This argument should be one of "bil", "bip", or "bsq". If not specified, the image will be written in BIP interleave. `byteorder` (int or string): Specifies the byte order (endian-ness) of the data as written to disk. For little endian, this value should be either 0 or "little". For big endian, it should be either 1 or "big". If not specified, native byte order will be used. `metadata` (dict): A dict containing ENVI header parameters (e.g., parameters extracted from a source image). 
`class_names` (array of strings): For classification results, specifies the names to assign each integer in the class map being written. If not given, default class names are created. `class_colors` (array of RGB-tuples): For classification results, specifies colors to assign each integer in the class map being written. If not given, default colors are automatically generated. If the source image being saved was already in ENVI format, then the SpyFile object for that image will contain a `metadata` dict that can be passed as the `metadata` keyword. However, care should be taken to ensure that all the metadata fields from the source image are still accurate (e.g., wavelengths do not apply to classification results). ''' data, metadata = _prepared_data_and_metadata(hdr_file, image, **kwargs) metadata['file type'] = "ENVI Classification" class_names = kwargs.get('class_names', metadata.get('class_names', None)) class_colors = kwargs.get('class_colors', metadata.get('class_colors', None)) if class_names is None: # guess the number of classes and create default class names n_classes = int(np.max(data) + 1) metadata['classes'] = str(n_classes) metadata['class names'] = (['Unclassified'] + ['Class ' + str(i) for i in range(1, n_classes)]) # if keyword is given, override whatever is in the metadata dict else: n_classes = int(max(np.max(data) + 1, len(class_names))) metadata['class names'] = class_names metadata['classes'] = str(n_classes) # the resulting value for 'class lookup' needs to be a flattened array. colors = [] if class_colors is not None: try: for color in class_colors: # call list() in case color is a numpy array colors += list(color) except: # list was already flattened colors = list(class_colors) if len(colors) < n_classes * 3: colors = [] for i in range(n_classes): colors += list(spy.spy_colors[i % len(spy.spy_colors)]) metadata['class lookup'] = colors _write_image(hdr_file, data, metadata, **kwargs) def _prepared_data_and_metadata(hdr_file, image, **kwargs): ''' Return data array and metadata dict representing `image`. ''' endian_out = str(kwargs.get('byteorder', sys.byteorder)).lower() if endian_out in ('0', 'little'): endian_out = 'little' elif endian_out in ('1', 'big'): endian_out = 'big' else: raise ValueError('Invalid byte order: "%s".' 
% endian_out) if isinstance(image, np.ndarray): data = image src_interleave = 'bip' if len(data.shape) == 2: data = data[:, :, np.newaxis] swap = False metadata = {} elif isinstance(image, SpyFile): if image.using_memmap is True: data = image._memmap src_interleave = {spy.BSQ: 'bsq', spy.BIL: 'bil', spy.BIP: 'bip'}[image.interleave] swap = image.swap else: data = image.load(dtype=image.dtype, scale=False) src_interleave = 'bip' swap = False metadata = image.metadata.copy() else: data = image.load() src_interleave = 'bip' swap = False if hasattr(image, 'metadata'): metadata = image.metadata.copy() else: metadata = {} metadata.update(kwargs.get('metadata', {})) add_image_info_to_metadata(image, metadata) if hasattr(image, 'bands'): add_band_info_to_metadata(image.bands, metadata) dtype = np.dtype(kwargs.get('dtype', data.dtype)).char _validate_dtype(dtype) if dtype != data.dtype.char: data = data.astype(dtype) metadata['data type'] = dtype_to_envi[dtype] interleave = kwargs.get('interleave', 'bip').lower() if interleave not in ['bil', 'bip', 'bsq']: raise ValueError('Invalid interleave: %s' % str(kwargs['interleave'])) if interleave != src_interleave: data = data.transpose(interleave_transpose(src_interleave, interleave)) metadata['interleave'] = interleave metadata['byte order'] = 1 if endian_out == 'big' else 0 if (endian_out == sys.byteorder and not data.dtype.isnative) or \ (endian_out != sys.byteorder and data.dtype.isnative): data = data.byteswap() return data, metadata # A few header parameters need to be set no matter what is provided in the # supplied metadata. def add_image_info_to_metadata(image, metadata): ''' Set keys in metadata dict to values appropriate for image. ''' if isinstance(image, SpyFile) and image.scale_factor != 1: metadata['reflectance scale factor'] = image.scale_factor # Always write data from start of file, regardless of what was in # the provided metadata. offset = int(metadata.get('header offset', 0)) if offset != 0: logger.debug('Ignoring non-zero header offset in provided metadata.') metadata['header offset'] = 0 metadata['lines'] = image.shape[0] metadata['samples'] = image.shape[1] if len(image.shape) == 3: metadata['bands'] = image.shape[2] else: metadata['bands'] = 1 def add_band_info_to_metadata(bands, metadata, overwrite=False): '''Adds BandInfo data to the metadata dict. Data is only added if not already present, unless `overwrite` is True. ''' if bands.centers is not None and (overwrite is True or 'wavelength' not in metadata): metadata['wavelength'] = bands.centers if bands.bandwidths is not None and (overwrite is True or 'fwhm' not in metadata): metadata['fwhm'] = bands.bandwidths if bands.band_unit is not None and (overwrite is True or 'wavelength units' not in metadata): metadata['wavelength units'] = bands.band_unit def _write_image(hdr_file, data, header, **kwargs): ''' Write `data` as an ENVI file using the metadata in `header`. ''' check_compatibility(header) force = kwargs.get('force', False) img_ext = kwargs.get('ext', '.img') (hdr_file, img_file) = check_new_filename(hdr_file, img_ext, force) write_envi_header(hdr_file, header, is_library=False) logger.debug('Saving', img_file) # bufsize = data.shape[0] * data.shape[1] * np.dtype(dtype).itemsize bufsize = data.shape[0] * data.shape[1] * data.dtype.itemsize fout = builtins.open(img_file, 'wb', bufsize) fout.write(data.tostring()) fout.close() def create_image(hdr_file, metadata=None, **kwargs): ''' Creates an image file and ENVI header with a memmep array for write access. 
Arguments: `hdr_file` (str): Header file (with ".hdr" extension) name with path. `metadata` (dict): Metadata to specify the image file format. The following parameters (in ENVI header format) are required, if not specified via corresponding keyword arguments: "bands", "lines", "samples", and "data type". Keyword Arguments: `dtype` (numpy dtype or type string): The numpy data type with which to store the image. For example, to store the image in 16-bit unsigned integer format, the argument could be any of `numpy.uint16`, "u2", "uint16", or "H". If this keyword is given, it will override the "data type" parameter in the `metadata` argument. `force` (bool, False by default): If the associated image file or header already exist and `force` is True, the files will be overwritten; otherwise, if either of the files exist, an exception will be raised. `ext` (str): The extension to use for the image file. If not specified, the default extension ".img" will be used. If `ext` is an empty string, the image file will have the same name as the header but without the ".hdr" extension. `interleave` (str): Must be one of "bil", "bip", or "bsq". This keyword supercedes the value of "interleave" in the metadata argument, if given. If no interleave is specified (via keyword or `metadata`), "bip" is assumed. `shape` (tuple of integers): Specifies the number of rows, columns, and bands in the image. This keyword should be either of the form (R, C, B) or (R, C), where R, C, and B specify the number or rows, columns, and bands, respectively. If B is omitted, the number of bands is assumed to be one. If this keyword is given, its values supercede the values of "bands", "lines", and "samples" if they are present in the `metadata` argument. `offset` (integer, default 0): The offset (in bytes) of image data from the beginning of the file. This value supercedes the value of "header offset" in the metadata argument (if given). Returns: `SpyFile` object: To access a `numpy.memmap` for the returned `SpyFile` object, call the `open_memmap` method of the returned object. Examples: Creating a new image from metadata:: >>> md = {'lines': 30, 'samples': 40, 'bands': 50, 'data type': 12} >>> img = envi.create_image('new_image.hdr', md) Creating a new image via keywords:: >>> img = envi.create_image('new_image2.hdr', shape=(30, 40, 50), dtype=np.uint16) Writing to the new image using a memmap interface:: >>> # Set all band values for a single pixel to 100. 
>>> mm = img.open_memmap(writable=True) >>> mm[30, 30] = 100 ''' force = kwargs.get('force', False) img_ext = kwargs.get('ext', '.img') memmap_mode = kwargs.get('memmap_mode', 'w+') (hdr_file, img_file) = check_new_filename(hdr_file, img_ext, force) default_metadata = {'header offset': 0, 'interleave': 'bip'} if metadata is None: metadata = default_metadata else: default_metadata.update(metadata) metadata = default_metadata # Keyword args supercede metadata dict if 'shape' in kwargs: shape = kwargs['shape'] metadata['lines'] = shape[0] metadata['samples'] = shape[1] if len(shape) == 3: metadata['bands'] = shape[2] else: metadata['bands'] = 1 if 'offset' in kwargs: metadata['header offset'] = kwargs['offset'] if 'dtype' in kwargs: metadata['data type'] = dtype_to_envi[np.dtype(kwargs['dtype']).char] if 'interleave' in kwargs: metadata['interleave'] = kwargs['interleave'] metadata['byte order'] = spy.byte_order # Verify minimal set of parameters have been provided if 'lines' not in metadata: raise EnviException('Number of image rows is not defined.') elif 'samples' not in metadata: raise EnviException('Number of image columns is not defined.') elif 'bands' not in metadata: raise EnviException('Number of image bands is not defined.') elif 'samples' not in metadata: raise EnviException('Number of image columns is not defined.') elif 'data type' not in metadata: raise EnviException('Image data type is not defined.') params = gen_params(metadata) dt = np.dtype(params.dtype).char _validate_dtype(dt) params.filename = img_file is_library = False if metadata.get('file type') == 'ENVI Spectral Library': is_library = True raise NotImplementedError('ENVI Spectral Library cannot be created ') # Create the appropriate object type -> the memmap (=image) will be # created on disk inter = metadata["interleave"] (R, C, B) = (params.nrows, params.ncols, params.nbands) if inter.lower() not in ['bil', 'bip', 'bsq']: raise ValueError('Invalid interleave specified: %s.' % str(inter)) if inter.lower() == 'bil': memmap = np.memmap(img_file, dtype=dt, mode=memmap_mode, offset=params.offset, shape=(R, B, C)) img = BilFile(params, metadata) img._memmap = memmap elif inter.lower() == 'bip': memmap = np.memmap(img_file, dtype=dt, mode=memmap_mode, offset=params.offset, shape=(R, C, B)) img = BipFile(params, metadata) img._memmap = memmap else: memmap = np.memmap(img_file, dtype=dt, mode=memmap_mode, offset=params.offset, shape=(B, R, C)) img = BsqFile(params, metadata) img._memmap = memmap # Write the header file after the image to assure write success write_envi_header(hdr_file, metadata, is_library=is_library) return img class SpectralLibrary: ''' The envi.SpectralLibrary class holds data contained in an ENVI-formatted spectral library file (.sli files), which stores data as specified by a corresponding .hdr header file. The primary members of an Envi.SpectralLibrary object are: `spectra` (:class:`numpy.ndarray`): A subscriptable array of all spectra in the library. `spectra` will have shape `CxB`, where `C` is the number of spectra in the library and `B` is the number of bands for each spectrum. `names` (list of str): A length-`C` list of names corresponding to the spectra. `bands` (:class:`spectral.BandInfo`): Spectral bands associated with the library spectra. ''' def __init__(self, data, header=None, params=None): '''Creates a new spectral library array Arguments: `data` (array-like): Array with shape `CxB`, where `C` is the number of spectra in the library and `B` is the number of bands for each spectrum. 
`header` (dict): Optional dict of ENVI header parameters. `params` (Params): Optional SpyFile Params object ''' self.spectra = data (n_spectra, n_bands) = data.shape if header is None: header = {} header = header.copy() self.bands = BandInfo() centers = header.pop('wavelength', None) if centers is not None: if len(centers) != n_bands: raise ValueError('Number of band centers does not match data') self.bands.centers = [float(c) for c in centers] fwhm = header.pop('fwhm', None) if fwhm is not None: if len(fwhm) != n_bands: raise ValueError('Number of fwhm values does not match data') self.bands.bandwidths = [float(f) for f in fwhm] names = header.pop('spectra names', None) if names is not None: if len(names) != n_spectra: raise ValueError('Number of spectrum names does not match data') self.names = names else: self.names = [str(i + 1) for i in range(n_spectra)] self.bands.band_unit = header.get('wavelength units', "") self.bands.band_quantity = "Wavelength" self.params = params self.metadata = header.copy() self.metadata['data ignore value'] = 'NaN' def save(self, file_basename, description=None): ''' Saves the spectral library to a library file. Arguments: `file_basename` (str): Name of the file (without extension) to save. `description` (str): Optional text description of the library. This method creates two files: `file_basename`.hdr and `file_basename`.sli. ''' meta = self.metadata.copy() meta['samples'] = self.spectra.shape[1] meta['lines'] = self.spectra.shape[0] meta['bands'] = 1 meta['header offset'] = 0 meta['data type'] = 4 # 32-bit float meta['interleave'] = 'bsq' meta['byte order'] = spy.byte_order meta['wavelength units'] = self.bands.band_unit meta['spectra names'] = [str(n) for n in self.names] if self.bands.centers is not None: meta['wavelength'] = self.bands.centers if self.bands.bandwidths is not None: meta['fwhm'] = self.bands.bandwidths if (description): meta['description'] = description write_envi_header(file_basename + '.hdr', meta, True) fout = builtins.open(file_basename + '.sli', 'wb') self.spectra.astype('f').tofile(fout) fout.close() def _write_header_param(fout, paramName, paramVal): if paramName.lower() == 'description': valStr = '{\n%s}' % '\n'.join([' ' + line for line in paramVal.split('\n')]) elif not is_string(paramVal) and hasattr(paramVal, '__len__'): valStr = '{ %s }' % ( ' , '.join([str(v).replace(',', '-') for v in paramVal]),) else: valStr = str(paramVal) fout.write('%s = %s\n' % (paramName, valStr)) def write_envi_header(fileName, header_dict, is_library=False): fout = builtins.open(fileName, 'w') d = {} d.update(header_dict) if is_library: d['file type'] = 'ENVI Spectral Library' elif 'file type' not in d: d['file type'] = 'ENVI Standard' fout.write('ENVI\n') # Write the standard parameters at the top of the file std_params = ['description', 'samples', 'lines', 'bands', 'header offset', 'file type', 'data type', 'interleave', 'sensor type', 'byte order', 'reflectance scale factor', 'map info'] for k in std_params: if k in d: _write_header_param(fout, k, d[k]) for k in d: if k not in std_params: _write_header_param(fout, k, d[k]) fout.close() spectral-0.22.4/spectral/io/erdas.py000066400000000000000000000156701412674721200173270ustar00rootroot00000000000000''' Functions for reading Erdas files. ''' # Following description accessed on 2011-01-25 at # http://www.pcigeomatics.com/cgi-bin/pcihlp/ERDASWR|IMAGE+FORMAT # # The ERDAS image file format contains a header record (128 bytes), followed by # the image data. 
The image data is arranged in a Band Interleaved by Line # (BIL) format. Each file is virtually unlimited in size - the file structure # allows up to 274 billion bytes. The file consists of 512-byte records. # # ERDAS IMAGE FILE FORMAT # +----------------------------------------------------------+ # | Record 1 (bytes 1 to 128) Header | # | -------------------------------- | # | | # | Bytes Type Contents | # | | # | 1- 6 ASCII Descriptor (HEAD74 or HEADER) | # | 7- 8 I*2 Type of data:0=8 bit /1=4 bit/2=16 bit| # | 9-10 I*2 Number of Channels | # | 11-16 Unused | # | 17-20 I*4 Number of Pixels, if HEAD74 | # | (R*4 Number of Pixels, if HEADER) | # | 21-24 I*4 Number of Lines, if HEAD74 | # | (R*4 Number of Lines, if HEADER) | # | 25-28 I*4 X-coordinate of 1st pixel, if HEAD74 | # | (R*4 X-coordinate of 1st pixel, if HEADER) | # | 29-32 I*4 Y-coordinate of 1st pixel, if HEAD74 | # | (R*4 Y-coordinate of 1st pixel, if HEADER) | # | 33-88 Unused | # | 89-90 I*2 Integer which indicates Map type | # | 91-92 I*2 Number of classes in the data set | # | 93-106 Unused | # | 107-108 I*2 Units of area of each pixel | # | 0=NONE, 1=ACRE, 2=HECTAR, 3=OTHER | # | 109-112 R*4 Number of pixel area units | # | 113-116 R*4 Map X-coordinate of upper left corner | # | 117-120 R*4 Map Y-coordinate of upper left corner | # | 121-124 R*4 X-pixel size | # | 125-128 R*4 Y-pixel size | # | | # | Data files values begin in bytes 129 and cross over | # | record boundaries as necessary. | # | Data are arranged in following order: | # | | # | L - Lines; C - Channels; P - Pixels per line; | # | | # | Pixels 1 through x of line 1, band 1 | # | Pixels 1 through x of line 1, band n | # | | # | Pixels 1 through x of line 2, band 1 | # | Pixels 1 through x of line 2, band n | # | | # | Pixels 1 through x of line y, band 1 | # | Pixels 1 through x of line y, band n | # +----------------------------------------------------------+ from __future__ import absolute_import, division, print_function, unicode_literals import array import numpy as np import sys import spectral as spy from ..utilities.python23 import IS_PYTHON3, typecode from .bilfile import BilFile from .spyfile import find_file_path, InvalidFileError from .spyfile import InvalidFileError if IS_PYTHON3: import builtins else: import __builtin__ as builtins def open(file): ''' Returns a SpyFile object for an ERDAS/Lan image file. Arguments: `file` (str): Name of the ERDAS/Lan image data file. Returns: A SpyFile object for the image file. Raises: spectral.io.spyfile.InvalidFileError ''' # ERDAS 7.5 headers do not specify byte order so we'll guess little endian. # If any of the parameters look weird, we'll try again with big endian. class Params: pass p = Params() p.byte_order = 0 file_path = find_file_path(file) lh = read_erdas_lan_header(find_file_path(file)) if lh["nbands"] < 0 or lh["nbands"] > 512 or \ lh["ncols"] < 0 or lh["ncols"] > 10000 or \ lh["nrows"] < 0 or lh["nrows"] > 10000: p.byte_order = 1 lh = read_erdas_lan_header(file_path, 1) p.filename = file_path p.nbands = lh["nbands"] p.ncols = lh["ncols"] p.nrows = lh["nrows"] p.offset = 128 if lh["packing"] == 2: p.dtype = np.dtype('i2').str elif lh["packing"] == 0: p.dtype = np.dtype('i1').str elif lh["packing"] == 1: msg = '4-bit data type not supported in SPy ERDAS/Lan format handler.' raise InvalidFileError(msg) else: msg = 'Unexpected data type specified in ERDAS/Lan header.' 
raise InvalidFileError(msg) if spy.byte_order != 0: p.dtype = np.dtype(p.dtype).newbyteorder().str return BilFile(p, lh) def read_erdas_lan_header(fileName, byte_order=0): '''Read parameters from a lan file header. Arguments: fileName (str): File to open. byte_order (int) [default 0]: Specifies whether to read as little (0) or big (1) endian. ''' f = builtins.open(fileName, "rb") h = {} h["format"] = "lan" h["fileName"] = fileName h["sizeOfHeader"] = 128 h["type"] = f.read(6) if h["type"] not in (b'HEAD74', b'HEADER'): raise InvalidFileError('Does not look like an ERDAS Lan header.') # Read all header data into arrays word = array.array(typecode('h')) dword = array.array(typecode('i')) float = array.array(typecode('f')) word.fromfile(f, 2) f.seek(16) if h["type"] == b'HEAD74': dword.fromfile(f, 4) else: float.fromfile(f, 4) f.seek(88) word.fromfile(f, 2) f.seek(106) word.fromfile(f, 1) float.fromfile(f, 5) if byte_order != spy.byte_order: word.byteswap() dword.byteswap() float.byteswap() # Unpack all header data h["packing"] = word.pop(0) h["nbands"] = word.pop(0) if h["type"] == b'HEAD74': h["ncols"] = dword.pop(0) h["nrows"] = dword.pop(0) h["pixel_xcoord"] = dword.pop(0) h["pixel_ycoord"] = dword.pop(0) else: h["ncols"] = int(float.pop(0)) h["nrows"] = int(float.pop(0)) h["pixel_xcoord"] = float.pop(0) h["pixel_ycoord"] = float.pop(0) h["map_type"] = word.pop(0) h["nclasses"] = word.pop(0) h["area_unit"] = word.pop(0) h["ypixel_size"] = float.pop() h["xpixel_size"] = float.pop() h["map_ycoord"] = float.pop() h["map_xcoord"] = float.pop() h["narea_units"] = float.pop() f.close() return h spectral-0.22.4/spectral/io/spyfile.py000066400000000000000000000722261412674721200177040ustar00rootroot00000000000000''' :class:`~spectral.SpyFile` is the base class for creating objects to read hyperspectral data files. When a :class:`~spectral.SpyFile` object is created, it provides an interface to read data from a corresponding file. When an image is opened, the actual object returned will be a subclass of :class:`~spectral.SpyFile` (BipFile, BilFile, or BsqFile) corresponding to the interleave of the data within the image file. Let's open our sample image. .. ipython:: In [1]: from spectral import * In [2]: img = open_image('92AV3C.lan') In [3]: img.__class__ Out[3]: spectral.io.bilfile.BilFile In [4]: print(img) Data Source: '/Users/thomas/spectral_data/92AV3C.lan' # Rows: 145 # Samples: 145 # Bands: 220 Interleave: BIL Quantization: 16 bits Data format: int16 The image was not located in the working directory but it was still opened because it was in a directory specified by the *SPECTRAL_DATA* environment variable. Because the image pixel data are interleaved by line, the *image* function returned a *BilFile* instance. Since hyperspectral image files can be quite large, only metadata are read from the file when the :class:`~spectral.SpyFile` object is first created. Image data values are only read when specifically requested via :class:`~spectral.SpyFile` methods. The :class:`~spectral.SpyFile` class provides a subscript operator that behaves much like the numpy array subscript operator. The :class:`~spectral.SpyFile` object is subscripted as an *MxNxB* array where *M* is the number of rows in the image, *N* is the number of columns, and *B* is thenumber of bands. .. 
ipython:: In [5]: img.shape Out[5]: (145, 145, 220) In [6]: pixel = img[50,100] In [7]: pixel.shape Out[7]: (220,) In [8]: band6 = img[:,:,5] In [9]: band6.shape Out[9]: (145, 145, 1) The image data values were not read from the file until the subscript operator calls were performed. Note that since Python indices start at 0, ``img[50,100]`` refers to the pixel at 51st row and 101st column of the image. Similarly, ``img[:,:,5]`` refers to all the rows and columns for the 6th band of the image. :class:`~spectral.SpyFile` subclass instances returned for particular image files will also provide the following methods: ============== =============================================================== Method Description ============== =============================================================== read_band Reads a single band into an *MxN* array read_bands Reads multiple bands into an *MxNxC* array read_pixel Reads a single pixel into a length *B* array read_subregion Reads multiple bands from a rectangular sub-region of the image read_subimage Reads specified rows, columns, and bands ============== =============================================================== :class:`~spectral.SpyFile` objects have a ``bands`` member, which is an instance of a :class:`~spectral.BandInfo` object that contains optional information about the images spectral bands. ''' from __future__ import absolute_import, division, print_function, unicode_literals import array import numpy as np import os import warnings import spectral as spy from .. import SpyException from ..image import Image, ImageArray from ..utilities.errors import has_nan, NaNValueWarning from ..utilities.python23 import typecode, tobytes, frombytes class FileNotFoundError(SpyException): pass class InvalidFileError(SpyException): '''Raised when file contents are invalid for the exepected file type.''' pass def find_file_path(filename): ''' Search cwd and SPECTRAL_DATA directories for the given file. ''' pathname = None dirs = [os.curdir] if 'SPECTRAL_DATA' in os.environ: dirs += os.environ['SPECTRAL_DATA'].split(os.pathsep) for d in dirs: testpath = os.path.join(d, filename) if os.path.isfile(testpath): pathname = testpath break if not pathname: msg = 'Unable to locate file "%s". If the file exists, ' \ 'use its full path or place its directory in the ' \ 'SPECTRAL_DATA environment variable.' % filename raise FileNotFoundError(msg) return pathname class SpyFile(Image): '''A base class for accessing spectral image files''' def __init__(self, params, metadata=None): Image.__init__(self, params, metadata) # Number by which to divide values read from file. self.scale_factor = 1.0 def set_params(self, params, metadata): Image.set_params(self, params, metadata) try: self.filename = params.filename self.offset = params.offset self.byte_order = params.byte_order if spy.byte_order != self.byte_order: self.swap = 1 else: self.swap = 0 self.sample_size = np.dtype(params.dtype).itemsize self.fid = open(find_file_path(self.filename), "rb") # So that we can use this more like a Numeric array self.shape = (self.nrows, self.ncols, self.nbands) except: raise def transform(self, xform): '''Returns a SpyFile image with the linear transform applied.''' # This allows a LinearTransform object to take the SpyFile as an arg. 
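        # Illustrative usage (a sketch, not part of the original source): if
        # `img` is an open SpyFile with B bands and `A` is a CxB ndarray or
        # LinearTransform, then `img.transform(A)` returns a TransformedImage;
        # the transform is applied to pixel spectra only when they are read.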
return transform_image(xform, self) def __str__(self): '''Prints basic parameters of the associated file.''' s = '\tData Source: \'%s\'\n' % self.filename s += '\t# Rows: %6d\n' % (self.nrows) s += '\t# Samples: %6d\n' % (self.ncols) s += '\t# Bands: %6d\n' % (self.shape[2]) if self.interleave == spy.BIL: interleave = 'BIL' elif self.interleave == spy.BIP: interleave = 'BIP' else: interleave = 'BSQ' s += '\tInterleave: %6s\n' % (interleave) s += '\tQuantization: %3d bits\n' % (self.sample_size * 8) s += '\tData format: %8s' % np.dtype(self.dtype).name return s def load(self, **kwargs): '''Loads entire image into memory in a :class:`spectral.image.ImageArray`. Keyword Arguments: `dtype` (numpy.dtype): An optional dtype to which the loaded array should be cast. `scale` (bool, default True): Specifies whether any applicable scale factor should be applied to the data after loading. :class:`spectral.image.ImageArray` is derived from both :class:`spectral.image.Image` and :class:`numpy.ndarray` so it supports the full :class:`numpy.ndarray` interface. The returns object will have shape `(M,N,B)`, where `M`, `N`, and `B` are the numbers of rows, columns, and bands in the image. ''' for k in list(kwargs.keys()): if k not in ('dtype', 'scale'): raise ValueError('Invalid keyword %s.' % str(k)) dtype = kwargs.get('dtype', ImageArray.format) data = array.array(typecode('b')) self.fid.seek(self.offset) data.fromfile(self.fid, self.nrows * self.ncols * self.nbands * self.sample_size) npArray = np.frombuffer(tobytes(data), dtype=self.dtype) if self.interleave == spy.BIL: npArray.shape = (self.nrows, self.nbands, self.ncols) npArray = npArray.transpose([0, 2, 1]) elif self.interleave == spy.BSQ: npArray.shape = (self.nbands, self.nrows, self.ncols) npArray = npArray.transpose([1, 2, 0]) else: npArray.shape = (self.nrows, self.ncols, self.nbands) npArray = npArray.astype(dtype) if self.scale_factor != 1 and kwargs.get('scale', True): npArray = npArray / float(self.scale_factor) imarray = ImageArray(npArray, self) if has_nan(imarray): warnings.warn('Image data contains NaN values.', NaNValueWarning) return imarray def __getitem__(self, args): '''Subscripting operator that provides a numpy-like interface. Usage:: x = img[i, j] x = img[i, j, k] Arguments: `i`, `j`, `k` (int or :class:`slice` object) Integer subscript indices or slice objects. The subscript operator emulates the :class:`numpy.ndarray` subscript operator, except data are read from the corresponding image file instead of an array object in memory. For frequent access or when accessing a large fraction of the image data, consider calling :meth:`spectral.SpyFile.load` to load the data into an :meth:`spectral.image.ImageArray` object and using its subscript operator instead. 
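        Negative indices are also accepted and are interpreted relative to the
        end of the corresponding axis. For example, `img[-1, -1]` reads the
        pixel in the last row and last column of the image.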
Examples: Read the pixel at the 30th row and 51st column of the image:: pixel = img[29, 50] Read the 10th band:: band = img[:, :, 9] Read the first 30 bands for a square sub-region of the image:: region = img[50:100, 50:100, :30] ''' atypes = [type(a) for a in args] if len(args) < 2: raise IndexError('Too few subscript indices.') fix_negative_indices = self._fix_negative_indices if atypes[0] == atypes[1] == int and len(args) == 2: row = fix_negative_indices(args[0], 0) col = fix_negative_indices(args[1], 1) return self.read_pixel(row, col) elif len(args) == 3 and atypes[0] == atypes[1] == atypes[2] == int: row = fix_negative_indices(args[0], 0) col = fix_negative_indices(args[1], 1) band = fix_negative_indices(args[2], 2) return self.read_datum(row, col, band) else: # At least one arg should be a slice if atypes[0] == slice: (xstart, xstop, xstep) = (args[0].start, args[0].stop, args[0].step) if xstart is None: xstart = 0 if xstop is None: xstop = self.nrows if xstep is None: xstep = 1 rows = list(range(xstart, xstop, xstep)) else: rows = [args[0]] if atypes[1] == slice: (ystart, ystop, ystep) = (args[1].start, args[1].stop, args[1].step) if ystart is None: ystart = 0 if ystop is None: ystop = self.ncols if ystep is None: ystep = 1 cols = list(range(ystart, ystop, ystep)) else: cols = [args[1]] if len(args) == 2 or args[2] is None: bands = None elif atypes[2] == slice: (zstart, zstop, zstep) = (args[2].start, args[2].stop, args[2].step) if zstart == zstop == zstep == None: bands = None else: if zstart is None: zstart = 0 if zstop is None: zstop = self.nbands if zstep is None: zstep = 1 bands = list(range(zstart, zstop, zstep)) elif atypes[2] == int: bands = [args[2]] else: # Band indices should be in a list bands = args[2] if atypes[0] == slice and xstep == 1 \ and atypes[1] == slice and ystep == 1 \ and (bands is None or type(bands) == list): xstart = fix_negative_indices(xstart, 0) xstop = fix_negative_indices(xstop, 0) ystart = fix_negative_indices(ystart, 0) ystop = fix_negative_indices(ystop, 0) bands = fix_negative_indices(bands, 2) return self.read_subregion((xstart, xstop), (ystart, ystop), bands) rows = fix_negative_indices(rows, 0) cols = fix_negative_indices(cols, 1) bands = fix_negative_indices(bands, 2) return self.read_subimage(rows, cols, bands) def _fix_negative_indices(self, indices, dim): if not indices: return indices dim_len = self.shape[dim] try: return [i if i >= 0 else dim_len + i for i in indices] except: return indices if indices >= 0 else dim_len + indices def params(self): '''Return an object containing the SpyFile parameters.''' p = Image.params(self) p.filename = self.filename p.offset = self.offset p.byte_order = self.byte_order p.sample_size = self.sample_size return p def __del__(self): self.fid.close() class SubImage(SpyFile): ''' Represents a rectangular sub-region of a larger SpyFile object. ''' def __init__(self, image, row_range, col_range): '''Creates a :class:`Spectral.SubImage` for a rectangular sub-region. Arguments: `image` (SpyFile): The image for which to define the sub-image. `row_range` (2-tuple): Integers [i, j) defining the row limits of the sub-region. `col_range` (2-tuple): Integers [i, j) defining the col limits of the sub-region. Returns: A :class:`spectral.SubImage` object providing a :class:`spectral.SpyFile` interface to a sub-region of the image. Raises: :class:`IndexError` Row and column ranges must be 2-tuples (i,j) where i >= 0 and i < j. 
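        Example (an illustrative sketch; assumes `image` is an open SpyFile
        with at least 50 rows and 60 columns)::

            sub = SubImage(image, (10, 50), (20, 60))
            pixel = sub.read_pixel(0, 0)  # same as image.read_pixel(10, 20)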
''' if row_range[0] < 0 or \ row_range[1] > image.nrows or \ col_range[0] < 0 or \ col_range[1] > image.ncols: raise IndexError('SubImage index out of range.') p = image.params() SpyFile.__init__(self, p, image.metadata) self.parent = image self.row_offset = row_range[0] self.col_offset = col_range[0] self.nrows = row_range[1] - row_range[0] self.ncols = col_range[1] - col_range[0] self.shape = (self.nrows, self.ncols, self.nbands) def read_band(self, band): '''Reads a single band from the image. Arguments: `band` (int): Index of band to read. Returns: :class:`numpy.ndarray` An `MxN` array of values for the specified band. ''' return self.parent.read_subregion([self.row_offset, self.row_offset + self.nrows - 1], [self.col_offset, self.col_offset + self.ncols - 1], [band]) def read_bands(self, bands): '''Reads multiple bands from the image. Arguments: `bands` (list of ints): Indices of bands to read. Returns: :class:`numpy.ndarray` An `MxNxL` array of values for the specified bands. `M` and `N` are the number of rows & columns in the image and `L` equals len(`bands`). ''' return self.parent.read_subregion([self.row_offset, self.row_offset + self.nrows - 1], [self.col_offset, self.col_offset + self.ncols - 1], bands) def read_pixel(self, row, col): '''Reads the pixel at position (row,col) from the file. Arguments: `row`, `col` (int): Indices of the row & column for the pixel Returns: :class:`numpy.ndarray` A length-`B` array, where `B` is the number of image bands. ''' return self.parent.read_pixel(row + self.row_offset, col + self.col_offset) def read_subimage(self, rows, cols, bands=[]): ''' Reads arbitrary rows, columns, and bands from the image. Arguments: `rows` (list of ints): Indices of rows to read. `cols` (list of ints): Indices of columns to read. `bands` (list of ints): Optional list of bands to read. If not specified, all bands are read. Returns: :class:`numpy.ndarray` An `MxNxL` array, where `M` = len(`rows`), `N` = len(`cols`), and `L` = len(bands) (or # of image bands if `bands` == None). ''' return self.parent.read_subimage(list(array.array(rows) \ + self.row_offset), list(array.array(cols) \ + self.col_offset), bands) def read_subregion(self, row_bounds, col_bounds, bands=None): ''' Reads a contiguous rectangular sub-region from the image. Arguments: `row_bounds` (2-tuple of ints): (a, b) -> Rows a through b-1 will be read. `col_bounds` (2-tuple of ints): (a, b) -> Columnss a through b-1 will be read. `bands` (list of ints): Optional list of bands to read. If not specified, all bands are read. Returns: :class:`numpy.ndarray` An `MxNxL` array. ''' return self.parent.read_subimage(list(np.array(row_bounds) \ + self.row_offset), list(np.array(col_bounds) \ + self.col_offset), bands) def tile_image(im, nrows, ncols): ''' Break an image into nrows x ncols tiles. USAGE: tiles = tile_image(im, nrows, ncols) ARGUMENTS: im The SpyFile to tile. nrows Number of tiles in the veritical direction. ncols Number of tiles in the horizontal direction. RETURN VALUE: tiles A list of lists of SubImage objects. tiles contains nrows lists, each of which contains ncols SubImage objects. 
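    EXAMPLE:

        # Illustrative sketch only: break an open SpyFile into a 2 x 3 grid
        # of tiles, then read a pixel from the tile in the first row and
        # second column of the grid.
        tiles = tile_image(im, 2, 3)
        pixel = tiles[0][1].read_pixel(0, 0)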
''' x = (np.array(list(range(nrows + 1))) * float(im.nrows) / nrows).astype(int) y = (np.array(list(range(ncols + 1))) * float(im.ncols) / ncols).astype(int) x[-1] = im.nrows y[-1] = im.ncols tiles = [] for r in range(len(x) - 1): row = [] for c in range(len(y) - 1): si = SubImage(im, [x[r], x[r + 1]], [y[c], y[c + 1]]) row.append(si) tiles.append(row) return tiles def transform_image(transform, img): '''Applies a linear transform to an image. Arguments: `transform` (ndarray or LinearTransform): The `CxB` linear transform to apply. `img` (ndarray or :class:`spectral.SpyFile`): The `MxNxB` image to be transformed. Returns (ndarray or :class:spectral.spyfile.TransformedImage`): The transformed image. If `img` is an ndarray, then a `MxNxC` ndarray is returned. If `img` is a :class:`spectral.SpyFile`, then a :class:`spectral.spyfile.TransformedImage` is returned. ''' from ..algorithms.transforms import LinearTransform if isinstance(img, np.ndarray): if isinstance(transform, LinearTransform): return transform(img) ret = np.empty(img.shape[:2] + (transform.shape[0],), img.dtype) for i in range(img.shape[0]): for j in range(img.shape[1]): ret[i, j] = np.dot(transform, img[i, j]) return ret else: return TransformedImage(transform, img) class TransformedImage(Image): ''' An image with a linear transformation applied to each pixel spectrum. The transformation is not applied until data is read from the image file. ''' dtype = np.dtype('f4').char def __init__(self, transform, img): from ..algorithms.transforms import LinearTransform if not isinstance(img, Image): raise Exception( 'Invalid image argument to to TransformedImage constructor.') if isinstance(transform, np.ndarray): transform = LinearTransform(transform) self.transform = transform if self.transform.dim_in not in (None, img.shape[-1]): raise Exception('Number of bands in image (%d) do not match the ' ' input dimension of the transform (%d).' % (img.shape[-1], transform.dim_in)) params = img.params() self.set_params(params, params.metadata) # If img is also a TransformedImage, then just modify the transform if isinstance(img, TransformedImage): self.transform = self.transform.chain(img.transform) self.image = img.image else: self.image = img if self.transform.dim_out is not None: self.shape = self.image.shape[:2] + (self.transform.dim_out,) self.nbands = self.transform.dim_out else: self.shape = self.image.shape self.nbands = self.image.nbands @property def bands(self): return self.image.bands def __getitem__(self, args): ''' Get data from the image and apply the transform. 
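        Band (third-axis) indices refer to the transformed output features,
        not to the bands of the underlying image. Illustrative example
        (assumes `timg` is a TransformedImage with at least one output
        feature)::

            x = timg[10, 20]    # full transformed spectrum of pixel (10, 20)
            y = timg[:, :, 0]   # first transformed feature for every pixel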
''' if len(args) < 2: raise Exception('Must pass at least two subscript arguments') # Note that band indices are wrt transformed features if len(args) == 2 or args[2] is None: bands = list(range(self.nbands)) elif type(args[2]) == slice: (zstart, zstop, zstep) = (args[2].start, args[2].stop, args[2].step) if zstart is None: zstart = 0 if zstop is None: zstop = self.nbands if zstep is None: zstep = 1 bands = list(range(zstart, zstop, zstep)) elif isinstance(args[2], int): bands = [args[2]] else: # Band indices should be in a list bands = args[2] orig = self.image.__getitem__(args[:2]) if len(orig.shape) == 1: orig = orig[np.newaxis, np.newaxis, :] elif len(orig.shape) == 2: orig = orig[np.newaxis, :] transformed_xy = np.zeros(orig.shape[:2] + (self.shape[2],), self.transform.dtype) for i in range(transformed_xy.shape[0]): for j in range(transformed_xy.shape[1]): transformed_xy[i, j] = self.transform(orig[i, j]) # Remove unnecessary dimensions transformed = np.take(transformed_xy, bands, 2) return transformed.squeeze() def __str__(self): s = '\tTransformedImage object with output dimensions:\n' s += '\t# Rows: %6d\n' % (self.nrows) s += '\t# Samples: %6d\n' % (self.ncols) s += '\t# Bands: %6d\n\n' % (self.shape[2]) s += '\tThe linear transform is applied to the following image:\n\n' s += str(self.image) return s def read_pixel(self, row, col): return self.transform(self.image.read_pixel(row, col)) def load(self): '''Loads all image data, transforms it, and returns an ndarray).''' data = self.image.load() return self.transform(data) def read_subregion(self, row_bounds, col_bounds, bands=None): ''' Reads a contiguous rectangular sub-region from the image. First arg is a 2-tuple specifying min and max row indices. Second arg specifies column min and max. If third argument containing list of band indices is not given, all bands are read. ''' data = self.image.read_subregion(row_bounds, col_bounds) xdata = self.transform(data) if bands: return np.take(xdata, bands, 2) else: return xdata def read_subimage(self, rows, cols, bands=None): ''' Reads a sub-image from a rectangular region within the image. First arg is a 2-tuple specifying min and max row indices. Second arg specifies column min and max. If third argument containing list of band indices is not given, all bands are read. ''' data = self.image.read_subimage(rows, cols) xdata = self.transform(data) if bands: return np.take(xdata, bands, 2) else: return xdata def read_datum(self, i, j, k): return self.read_pixel(i, j)[k] def read_bands(self, bands): shape = (self.image.nrows, self.image.ncols, len(bands)) data = np.zeros(shape, float) for i in range(shape[0]): for j in range(shape[1]): data[i, j] = self.read_pixel(i, j)[bands] return data class MemmapFile(object): '''Interface class for SpyFile subclasses using `numpy.memmap` objects.''' def _disable_memmap(self): '''Disables memmap and reverts to direct file reads (slower).''' self._memmap = None @property def using_memmap(self): '''Returns True if object is using a `numpy.memmap` to read data.''' return self._memmap is not None def open_memmap(self, **kwargs): '''Returns a new `numpy.memmap` object for image file data access. Keyword Arguments: `interleave` (str, default 'bip'): Specifies the shape/interleave of the returned object. Must be one of ['bip', 'bil', 'bsq', 'source']. If not specified, the memmap will be returned as 'bip'. If the interleave is 'source', the interleave of the memmap will be the same as the source data file. 
If the number of rows, columns, and bands in the file are R, C, and B, the shape of the returned memmap array will be as follows: .. table:: ========== =========== interleave array shape ========== =========== 'bip' (R, C, B) 'bil' (R, B, C) 'bsq' (B, R, C) ========== =========== `writable` (bool, default False): If `writable` is True, modifying values in the returned memmap will result in corresponding modification to the image data file. ''' src_inter = {spy.BIL: 'bil', spy.BIP: 'bip', spy.BSQ: 'bsq'}[self.interleave] dst_inter = kwargs.get('interleave', 'bip').lower() if dst_inter not in ['bip', 'bil', 'bsq', 'source']: raise ValueError('Invalid interleave specified.') if kwargs.get('writable', False) is True: mode = 'r+' else: mode = 'r' memmap = self._open_memmap(mode) if dst_inter == 'source': dst_inter = src_inter if src_inter == dst_inter: return memmap else: return np.transpose(memmap, interleave_transpose(src_inter, dst_inter)) def asarray(self, writable=False): '''Returns an object with a standard numpy array interface. The function returns a numpy memmap created with the `open_memmap` method. This function is for compatibility with ImageArray objects. Keyword Arguments: `writable` (bool, default False): If `writable` is True, modifying values in the returned memmap will result in corresponding modification to the image data file. ''' return self.open_memmap(writable=writable) def interleave_transpose(int1, int2): '''Returns the 3-tuple of indices to transpose between interleaves. Arguments: `int1`, `int2` (string): The input and output interleaves. Each should be one of "bil", "bip", or "bsq". Returns: A 3-tuple of integers that can be passed to `numpy.transpose` to convert and RxCxB image between the two interleaves. ''' if int1.lower() not in ('bil', 'bip', 'bsq'): raise ValueError('Invalid interleave: %s' % str(int1)) if int2.lower() not in ('bil', 'bip', 'bsq'): raise ValueError('Invalid interleave: %s' % str(int2)) int1 = int1.lower() int2 = int2.lower() if int1 == 'bil': if int2 == 'bil': return (1, 1, 1) elif int2 == 'bip': return (0, 2, 1) else: return (1, 0, 2) elif int1 == 'bip': if int2 == 'bil': return (0, 2, 1) elif int2 == 'bip': return (1, 1, 1) else: return (2, 0, 1) else: # bsq if int2 == 'bil': return (1, 0, 2) elif int2 == 'bip': return (1, 2, 0) else: return (1, 1, 1) spectral-0.22.4/spectral/spectral.py000066400000000000000000000077421412674721200174400ustar00rootroot00000000000000''' Top-level functions & classes. ''' from __future__ import absolute_import, division, print_function, unicode_literals import logging import numbers import numpy as np import pickle import os from warnings import warn #from .algorithms.algorithms import TrainingClassSet #from . import io #from .io import aviris, envi, erdas, spyfile #from .io.spyfile import find_file_path, SpyFile from . import settings def _init(): '''Basic configuration of the spectral package.''' _setup_logger() try: global settings from .graphics import graphics as spygraphics from .graphics import spypylab settings.plotter = spypylab settings.viewer = spygraphics except: raise warn('Unable to import or configure pylab plotter. 
Spectrum plots ' 'will be unavailable.', UserWarning) from .utilities import status spectral = __import__(__name__.split('.')[0]) spectral._status = status.StatusDisplay() def _setup_logger(): logger = logging.getLogger('spectral') logger.setLevel(logging.INFO) ch = logging.StreamHandler() formatter = logging.Formatter('%(name)s:%(levelname)s: %(message)s') ch.setFormatter(formatter) logger.addHandler(ch) class BandInfo: '''A BandInfo object characterizes the spectral bands associated with an image. All BandInfo member variables are optional. For *N* bands, all members of type will have length *N* and contain float values. ================= ===================================== ======= Member Description Default ================= ===================================== ======= centers List of band centers None bandwidths List of band FWHM values None centers_stdevs List of std devs of band centers None bandwidth_stdevs List of std devs of bands FWHMs None band_quantity Image data type (e.g., "reflectance") "" band_unit Band unit (e.g., "nanometer") "" ================= ===================================== ======= ''' def __init__(self): self.centers = None self.bandwidths = None self.centers_stdevs = None self.bandwidth_stdevs = None self.band_quantity = None self.band_unit = None def open_image(file): ''' Locates & opens the specified hyperspectral image. Arguments: file (str): Name of the file to open. Returns: SpyFile object to access the file. Raises: IOError. This function attempts to determine the associated file type and open the file. If the specified file is not found in the current directory, all directories listed in the :const:`SPECTRAL_DATA` environment variable will be searched until the file is found. If the file being opened is an ENVI file, the `file` argument should be the name of the header file. ''' from . import io pathname = io.spyfile.find_file_path(file) # Try to open it as an ENVI header file. try: return io.envi.open(pathname) except io.envi.FileNotAnEnviHeader: # It isn't an ENVI file so try another file type pass except: raise # Maybe it's an Erdas Lan file try: return io.erdas.open(pathname) except: pass # See if the size is consistent with an Aviris file try: return io.aviris.open(pathname) except: pass raise IOError('Unable to determine file type or type not supported.') def load_training_sets(file, image=None): ''' Loads a list of TrainingSet objects from a file. This function assumes that all the sets in the list refer to the same image and mask array. If that is not the case, this function should not be used. ''' from .algorithms.algorithms import TrainingClassSet ts = TrainingClassSet() ts.load(file, image) return ts spectral-0.22.4/spectral/tests/000077500000000000000000000000001412674721200164015ustar00rootroot00000000000000spectral-0.22.4/spectral/tests/__init__.py000066400000000000000000000021261412674721200205130ustar00rootroot00000000000000''' Package containing unit test modules for various functionality. To run all unit tests, type the following from the system command line: # python -m spectral.tests.run ''' from __future__ import absolute_import, division, print_function, unicode_literals # If abort_on_fail is True, an AssertionError will be raised when a unit test # fails; otherwise, the failure will be printed to stdout and testing will # continue. abort_on_fail = True # Summary stats of unit test execution _num_tests_run = 0 _num_tests_failed = 0 # Subdirectory to be created for unit test files testdir = 'spectral_test_files' from . 
import database from . import spyfile from . import transforms from . import memmap from . import envi from . import spymath from . import detectors from . import classifiers from . import dimensionality from . import spatial from . import iterators from . import continuum # List of all submodules to be run from the `run` submodule. all_tests = [spyfile, memmap, iterators, transforms, envi, spymath, detectors, classifiers, dimensionality, spatial, database, continuum] spectral-0.22.4/spectral/tests/classifiers.py000066400000000000000000000164451412674721200212740ustar00rootroot00000000000000''' Runs unit tests for classification routines. To run the unit tests, type the following from the system command line: # python -m spectral.tests.classifiers ''' from __future__ import absolute_import, division, print_function, unicode_literals import os import numpy as np import spectral as spy from numpy.testing import assert_allclose from .spytest import SpyTest, test_method from spectral.tests import testdir class ClassifierTest(SpyTest): '''Tests various classfication functions.''' def setup(self): if not os.path.isdir(testdir): os.mkdir(testdir) self.image = spy.open_image('92AV3C.lan') self.data = self.image.load() self.gt = spy.open_image('92AV3GT.GIS').read_band(0) self.ts = spy.create_training_classes(self.data, self.gt, calc_stats=True) self.class_filename = os.path.join(testdir, '92AV3C.classes') def test_save_training_sets(self): '''Test that TrainingClassSet data can be saved without exception.''' ts = spy.create_training_classes(self.data, self.gt, calc_stats=True) ts.save(self.class_filename) def test_load_training_sets(self): '''Test that the data loaded is the same as was saved.''' ts = spy.create_training_classes(self.data, self.gt, calc_stats=True) ts.save(self.class_filename) ts2 = spy.load_training_sets(self.class_filename, image=self.data) ids = list(ts.classes.keys()) for id in ids: s1 = ts[id] s2 = ts2[id] assert(s1.index == s2.index) np.testing.assert_almost_equal(s1.class_prob, s2.class_prob) assert_allclose(s1.stats.mean, s2.stats.mean) assert_allclose(s1.stats.cov, s2.stats.cov) np.testing.assert_equal(s1.stats.nsamples, s2.stats.nsamples) def test_gmlc_spectrum_image_equal(self): '''Tests that classification of spectrum is same as from image.''' gmlc = spy.GaussianClassifier(self.ts, min_samples=600) data = self.data[20: 30, 30: 40, :] assert(gmlc.classify_spectrum(data[2, 2]) == \ gmlc.classify_image(data)[2, 2]) def test_gmlc_classify_spyfile_runs(self): '''Tests that GaussianClassifier classifies a SpyFile object.''' gmlc = spy.GaussianClassifier(self.ts, min_samples=600) ret = gmlc.classify_image(self.image) def test_gmlc_classify_transformedimage_runs(self): '''Tests that GaussianClassifier classifies a TransformedImage object.''' pc = spy.principal_components(self.data).reduce(num=3) ximg = pc.transform(self.image) ts = spy.create_training_classes(pc.transform(self.data), self.gt, calc_stats=True) gmlc = spy.GaussianClassifier(ts) ret = gmlc.classify_image(ximg) def test_gmlc_classify_ndarray_transformedimage_equal(self): '''Gaussian classification of an ndarray and TransformedImage are equal''' pc = spy.principal_components(self.data).reduce(num=3) ximg = pc.transform(self.image) ts = spy.create_training_classes(pc.transform(self.data), self.gt, calc_stats=True) gmlc = spy.GaussianClassifier(ts) cl_ximg = gmlc.classify_image(ximg) cl_ndarray = gmlc.classify_image(pc.transform(self.data)) assert(np.all(cl_ximg == cl_ndarray)) def test_mahalanobis_class_mean(self): 
'''Test that a class's mean spectrum is classified as that class. Note this assumes that class priors are equal. ''' mdc = spy.MahalanobisDistanceClassifier(self.ts) cl = mdc.classes[0] assert(mdc.classify(cl.stats.mean) == cl.index) def test_mahalanobis_classify_spyfile_runs(self): '''Mahalanobis classifier works with a SpyFile object.''' mdc = spy.MahalanobisDistanceClassifier(self.ts) ret = mdc.classify_image(self.image) def test_mahalanobis_classify_transformedimage_runs(self): '''Mahalanobis classifier works with a TransformedImage object.''' pc = spy.principal_components(self.data).reduce(num=3) ximg = pc.transform(self.image) ts = spy.create_training_classes(pc.transform(self.data), self.gt, calc_stats=True) gmlc = spy.MahalanobisDistanceClassifier(ts) ret = gmlc.classify_image(ximg) def test_mahalanobis_classify_ndarray_transformedimage_equal(self): '''Mahalanobis classification of ndarray and TransformedImage are equal''' pc = spy.principal_components(self.data).reduce(num=3) ximg = pc.transform(self.image) ts = spy.create_training_classes(pc.transform(self.data), self.gt, calc_stats=True) mdc = spy.GaussianClassifier(ts) cl_ximg = mdc.classify_image(ximg) cl_ndarray = mdc.classify_image(pc.transform(self.data)) assert(np.all(cl_ximg == cl_ndarray)) def test_perceptron_learns_and(self): '''Test that 2x1 network can learn the logical AND function.''' from spectral.algorithms.perceptron import test_and (success, p) = test_and(stdout=None) assert(success) def test_perceptron_learns_xor(self): '''Test that 2x2x1 network can learn the logical XOR function.''' from spectral.algorithms.perceptron import test_xor231 # XOR isn't guaranteed to converge so try at lease a few times for i in range(10): (success, p) = test_xor231(3000, stdout=None) if success is True: return assert(False) def test_perceptron_learns_xor_222(self): '''Test that 2x2x2 network can learn the logical XOR function.''' from spectral.algorithms.perceptron import test_xor222 # XOR isn't guaranteed to converge so try at lease a few times for i in range(10): (success, p) = test_xor222(3000, stdout=None) if success is True: return assert(False) def test_perceptron_learns_image_classes(self): '''Test that perceptron can learn image class means.''' fld = spy.linear_discriminant(self.ts) xdata = fld.transform(self.data) classes = spy.create_training_classes(xdata, self.gt) nfeatures = xdata.shape[-1] nclasses = len(classes) for i in range(10): p = spy.PerceptronClassifier([nfeatures, 20, 8, nclasses]) success = p.train(classes, 1, 5000, batch=1, momentum=0.3, rate=0.3) if success is True: return assert(False) def test_mahalanobis_spectrum_image_equal(self): '''Tests that classification of spectrum is same as from image.''' mdc = spy.MahalanobisDistanceClassifier(self.ts) data = self.data[20: 30, 30: 40, :] assert(mdc.classify_spectrum(data[2, 2]) == \ mdc.classify_image(data)[2, 2]) def run(): print('\n' + '-' * 72) print('Running classifier tests.') print('-' * 72) test = ClassifierTest() test.run() if __name__ == '__main__': from spectral.tests.run import parse_args, reset_stats, print_summary parse_args() reset_stats() run() print_summary() spectral-0.22.4/spectral/tests/continuum.py000066400000000000000000000271171412674721200210040ustar00rootroot00000000000000''' Runs unit tests for continuum processing functions. 
To run the unit tests, type the following from the system command line: # python -m spectral.tests.continuum ''' from __future__ import absolute_import, division, print_function, unicode_literals import numpy as np from numpy.testing import assert_allclose import spectral as spy from spectral.algorithms.spymath import matrix_sqrt from spectral.algorithms.continuum import spectral_continuum, remove_continuum, continuum_points from spectral.tests.spytest import SpyTest class ContinuumTest(SpyTest): def setup(self): self.image = spy.open_image('92AV3C.lan') self.bands = np.sort( spy.aviris.read_aviris_bands('92AV3C.spc').centers) class FindContinuumTest(ContinuumTest): '''Tests spectral_continuum.''' def test_few_simple_cases(self): spectrum = np.array([1., 2., 2.5, 1.6, 0.75, 1.5, 2.2, 2.9, 1.8]) bands = np.array([1., 2., 3., 4., 5., 6., 7., 8., 9.]) expected = np.array([1., 2., 2.5, 2.58, 2.66, 2.74, 2.82, 2.9, 1.8]) assert_allclose(expected, spectral_continuum(spectrum, bands)) spectrum = np.array([0.6, 1., 2.45, 3.1, 3.25, 4.15, 4.35, 4.1, 3.1, 2.7, 2., 2.85, 3.75, 3., 2., 0.9]) bands = np.array([0.3, 1., 1.8, 3., 4.5, 5.2, 6.45, 7., 7.1, 8., 8.1, 9., 9.3, 10.2, 10.5, 10.6]) expected = np.array([0.6, 1.46333333, 2.45, 3.1, 3.81590909, 4.15, 4.35, 4.23421053, 4.21315789, 4.02368421, 4.00263158, 3.81315789, 3.75, 3., 2., 0.9]) assert_allclose(expected, spectral_continuum(spectrum, bands)) spectrum = np.array([0.5, 1.1, 1.5, 2.4, 1.9, 1.0]) bands = np.array([0.5, 1.0, 1.7, 2.0, 3.0, 3.5]) expected = np.array([0.5, 1.13333333, 2.02, 2.4, 1.9, 1.]) assert_allclose(expected, spectral_continuum(spectrum, bands)) spectrum = np.array([0.5, 1.1, 1.8, 2.0, 1.1, 0.9, 0.4]) bands = np.array([0.5, 0.9, 1.6, 2.0, 2.1, 2.8, 3.0]) expected = np.array([0.5, 1.1, 1.8, 2., 1.8625, 0.9, 0.4]) assert_allclose(expected, spectral_continuum(spectrum, bands)) def test_simple_segmented(self): # A case without local maximum inside concave regions. # Same as convex hull. spectrum = np.array([0.6, 1., 2.45, 3.1, 3.25, 4.15, 4.35, 4.1, 3.1, 2.7, 2., 2.85, 3.75, 3., 2., 0.9]) bands = np.array([0.3, 1., 1.8, 3., 4.5, 5.2, 6.45, 7., 7.1, 8., 8.1, 9., 9.3, 10.2, 10.5, 10.6]) expected = np.array([0.6, 1.46333333, 2.45, 3.1, 3.81590909, 4.15, 4.35, 4.23421053, 4.21315789, 4.02368421, 4.00263158, 3.81315789, 3.75, 3., 2., 0.9, ]) assert_allclose(expected, spectral_continuum( spectrum, bands, 'segmented')) # A case with single local maxima that gets filtered out, because it # does not satisfy quasi-convexity. spectrum = np.array([0.6, 1., 2.45, 3.1, 3.25, 4.15, 4.35, 4.1, 3.1, 3.7, 2., 2.85, 3.75, 3., 2., 0.9]) bands = np.array([0.3, 1., 1.8, 3., 4.5, 5.2, 6.45, 7., 7.1, 8., 8.1, 9., 9.3, 10.2, 10.5, 10.6]) expected = np.array([0.6, 1.46333333, 2.45, 3.1, 3.81590909, 4.15, 4.35, 4.23421053, 4.21315789, 4.02368421, 4.00263158, 3.81315789, 3.75, 3., 2., 0.9]) assert_allclose(expected, spectral_continuum( spectrum, bands, 'segmented')) # Reversed case. spectrum = spectrum[::-1] bands = np.cumsum(np.concatenate( (np.array([0.3]), (bands[1:] - bands[:-1])[::-1]))) expected = expected[::-1] assert_allclose(expected, spectral_continuum( spectrum, bands, 'segmented')) # A case with single valid local maxima inside concave region, # and one invalid. 
spectrum = np.array([0.60, 1.00, 2.45, 3.10, 3.25, 4.15, 4.35, 4.10, 3.10, 3.80, 3.50, 3.60, 2.00, 2.85, 3.75, 3.00, 2.00, 0.90]) bands = np.array([0.30, 1.00, 1.80, 3.00, 4.50, 5.20, 6.45, 7.00, 7.10, 8.00, 8.03, 8.07, 8.10, 9.00, 9.30, 10.20, 10.50, 10.6]) expected = np.array([0.6, 1.46333333, 2.45, 3.1, 3.81590909, 4.15, 4.35, 4.15483871, 4.11935484, 3.8, 3.79884615, 3.79730769, 3.79615385, 3.76153846, 3.75, 3., 2., 0.9]) assert_allclose(expected, spectral_continuum( spectrum, bands, 'segmented')) # Reversed case. spectrum = spectrum[::-1] bands = np.cumsum(np.concatenate( (np.array([0.3]), (bands[1:] - bands[:-1])[::-1]))) expected = expected[::-1] assert_allclose(expected, spectral_continuum( spectrum, bands, 'segmented')) # A case with two valid local maxima. spectrum = np.array([0.60, 1.00, 2.45, 3.10, 3.25, 4.15, 4.35, 4.10, 3.10, 3.80, 3.50, 3.75, 2.00, 2.85, 3.70, 3.00, 2.00, 0.90]) bands = np.array([0.30, 1.00, 1.80, 3.00, 4.50, 5.20, 6.45, 7.00, 7.10, 8.00, 8.03, 8.07, 8.10, 9.00, 9.30, 10.20, 10.50, 10.6]) expected = np.array([0.6, 1.46333333, 2.45, 3.1, 3.81590909, 4.15, 4.35, 4.15483871, 4.11935484, 3.8, 3.77857143, 3.75, 3.74878049, 3.71219512, 3.7, 3., 2., 0.9]) assert_allclose(expected, spectral_continuum( spectrum, bands, 'segmented')) # Reversed case. spectrum = spectrum[::-1] bands = np.cumsum(np.concatenate( (np.array([0.3]), (bands[1:] - bands[:-1])[::-1]))) expected = expected[::-1] assert_allclose(expected, spectral_continuum( spectrum, bands, 'segmented')) # A case with two valid local maxima, but one covering eliminating the # other. spectrum = np.array([0.60, 1.00, 2.45, 3.10, 3.25, 4.15, 4.35, 4.10, 3.10, 3.80, 3.50, 3.85, 2.00, 2.85, 3.70, 3.00, 2.00, 0.90]) bands = np.array([0.30, 1.00, 1.80, 3.00, 4.50, 5.20, 6.45, 7.00, 7.10, 8.00, 8.03, 8.07, 8.10, 9.00, 9.30, 10.20, 10.50, 10.6]) expected = np.array([0.6, 1.46333333, 2.45, 3.1, 3.81590909, 4.15, 4.35, 4.18024691, 4.14938272, 3.87160494, 3.86234568, 3.85, 3.84634146, 3.73658537, 3.7, 3., 2., 0.9]) assert_allclose(expected, spectral_continuum( spectrum, bands, 'segmented')) # Reversed case. spectrum = spectrum[::-1] bands = np.cumsum(np.concatenate( (np.array([0.3]), (bands[1:] - bands[:-1])[::-1]))) expected = expected[::-1] assert_allclose(expected, spectral_continuum( spectrum, bands, 'segmented')) def test_2d_array(self): part = self.image[20:22, 20:22].reshape(4, 220) cnt = spectral_continuum(part, self.bands) # Check some values to make sure results are sane. assert(cnt[0, 200] == 1422) assert(cnt[1, 200] == 1421) assert(cnt[2, 200] == 1469) assert(cnt[3, 200] == 1491) def test_3d_array(self): part = self.image[20:22, 20:22] cnt = spectral_continuum(part, self.bands) # Check some values to make sure results are sane. assert(cnt[0, 0, 200] == 1422) assert(cnt[0, 1, 200] == 1421) assert(cnt[1, 0, 200] == 1469) assert(cnt[1, 1, 200] == 1491) def test_out_parameter(self): part = self.image[20:22, 20:22] out = np.empty_like(part) cnt = spectral_continuum(part, self.bands, out=out) assert(cnt is out) # And just do a quick check if result is sane. 
assert(out[1, 1, 200] == 1491) class FindContinuumPointsTest(ContinuumTest): '''Tests continuum_points.''' def test_points_of_real_spectrum(self): points = continuum_points(self.image[20, 20], self.bands) assert(np.array_equal(points[0], self.bands[[0, 1, 2, 5, 6, 41, 219]])) assert(np.array_equal(points[1], np.array( [3505, 4141, 4516, 4924, 5002, 4712, 1019], dtype=np.int16))) def test_points_of_real_spectrum_segmented(self): # This example includes flat local maxima, that span three or more points. points = continuum_points(self.image[20, 20], self.bands, 'segmented') expected_result = ( np.array([400.019989, 409.820007, 419.619995, 449.070007, 458.899994, 783.27002, 802.530029, 841.039978, 860.280029, 879.530029, 994.880005, 1014.090027, 1052.48999, 1244.26001, 1273., 1282.959961, 1541.589966, 1561.439941, 1620.97998, 1630.900024, 2122.780029, 2132.72998, 2172.5, 2212.219971, 2222.149902, 2311.350098, 2331.139893, 2360.810059, 2390.459961, 2400.330078, 2429.949951, 2459.540039, 2498.959961]), np.array([3505, 4141, 4516, 4924, 5002, 4712, 4578, 4496, 4424, 4423, 3979, 3925, 3801, 3026, 2852, 2817, 2206, 2204, 2136, 2124, 1377, 1376, 1333, 1317, 1312, 1219, 1202, 1162, 1126, 1122, 1100, 1068, 1019], dtype=np.int16) ) assert(np.array_equal(points[0], expected_result[0])) assert(np.array_equal(points[1], expected_result[1])) class RemoveContinuumTest(ContinuumTest): '''Tests remove_continuum.''' def test_simple_case(self): continuum_removed = np.array([1., 0.6833713, 1., 1., 0.85169744, 1., 1., 0.96830329, 0.73579013, 0.67102681, 0.49967127, 0.74741201, 1., 1., 1., 1.]) bands = np.array([0.30, 1.00, 1.80, 3.00, 4.50, 5.20, 6.45, 7.00, 7.10, 8.00, 8.10, 9.00, 9.30, 10.20, 10.50, 10.6]) spectrum = np.array([0.60, 1.00, 2.45, 3.10, 3.25, 4.15, 4.35, 4.10, 3.10, 2.70, 2.00, 2.85, 3.75, 3.00, 2.00, 0.90]) assert_allclose(continuum_removed, remove_continuum(spectrum, bands)) def test_simple_case_segmented(self): continuum_removed = np.array([1., 0.6833713, 1., 1., 0.85169744, 1., 1., 0.98680124, 0.75254503, 1., 0.52684904, 0.75766871, 1., 1., 1., 1.]) bands = np.array([0.30, 1.00, 1.80, 3.00, 4.50, 5.20, 6.45, 7.00, 7.10, 8.00, 8.10, 9.00, 9.30, 10.20, 10.50, 10.6]) spectrum = np.array([0.60, 1.00, 2.45, 3.10, 3.25, 4.15, 4.35, 4.10, 3.10, 3.80, 2.00, 2.85, 3.75, 3.00, 2.00, 0.90]) assert_allclose(continuum_removed, remove_continuum( spectrum, bands, mode='segmented')) def test_in_and_out_same(self): part = self.image[20:22, 20:22].astype(np.float64) res = remove_continuum(part, self.bands, out=part) # Make sure results are sane. 
assert(res[1, 1, 200] == 0.8372113957762342) assert(res is part) def run(): print('\n' + '-' * 72) print('Running continuum tests.') print('-' * 72) for T in [FindContinuumTest, FindContinuumPointsTest, RemoveContinuumTest]: T().run() if __name__ == '__main__': from spectral.tests.run import parse_args, reset_stats, print_summary parse_args() reset_stats() run() print_summary() spectral-0.22.4/spectral/tests/data/000077500000000000000000000000001412674721200173125ustar00rootroot00000000000000spectral-0.22.4/spectral/tests/data/92AV3C.spc000066400000000000000000000253401412674721200206740ustar00rootroot00000000000000FILE spectral_920530-930111.ascii --------------------------------- 400.019989 9.780000 0.920000 0.500000 2.000000 409.820007 9.820000 0.920000 0.500000 3.000000 419.619995 9.850000 0.930000 0.500000 4.000000 429.429993 9.890000 0.940000 0.500000 5.000000 439.250000 9.920000 0.950000 0.500000 6.000000 449.070007 9.940000 0.950000 0.500000 7.000000 458.899994 9.970000 0.960000 0.500000 8.000000 468.730011 9.990000 0.970000 0.500000 9.000000 478.570007 10.010000 0.970000 0.500000 10.000000 488.410004 10.020000 0.980000 0.500000 11.000000 498.260010 10.040000 0.990000 0.500000 12.000000 508.119995 10.050000 1.000000 0.500000 13.000000 517.979980 10.050000 1.000000 0.500000 14.000000 527.849976 10.060000 1.010000 0.500000 15.000000 537.719971 10.060000 1.020000 0.500000 16.000000 547.599976 10.060000 1.030000 0.500000 17.000000 557.489990 10.050000 1.030000 0.500000 18.000000 567.380005 10.040000 1.040000 0.500000 19.000000 577.280029 10.030000 1.050000 0.500000 20.000000 587.179993 10.020000 1.060000 0.500000 21.000000 597.090027 10.000000 1.060000 0.500000 22.000000 607.010010 9.980000 1.070000 0.500000 23.000000 616.929993 9.960000 1.080000 0.500000 24.000000 626.849976 9.940000 1.090000 0.500000 25.000000 636.780029 9.910000 1.090000 0.500000 26.000000 646.719971 9.880000 1.100000 0.500000 27.000000 656.669983 9.840000 1.110000 0.500000 28.000000 666.609985 9.810000 1.120000 0.500000 29.000000 676.570007 9.770000 1.120000 0.500000 30.000000 686.530029 9.730000 1.130000 0.500000 31.000000 696.500000 9.680000 1.140000 0.500000 32.000000 686.909973 8.870000 0.880000 0.500000 34.000000 696.549988 8.870000 0.880000 0.500000 35.000000 706.190002 8.880000 0.890000 0.500000 36.000000 715.830017 8.880000 0.890000 0.500000 37.000000 725.469971 8.880000 0.900000 0.500000 38.000000 735.109985 8.890000 0.900000 0.500000 39.000000 744.739990 8.890000 0.910000 0.500000 40.000000 754.380005 8.890000 0.910000 0.500000 41.000000 764.010010 8.900000 0.920000 0.500000 42.000000 773.640015 8.900000 0.920000 0.500000 43.000000 783.270020 8.900000 0.930000 0.500000 44.000000 792.909973 8.910000 0.930000 0.500000 45.000000 802.530029 8.910000 0.940000 0.500000 46.000000 812.159973 8.910000 0.940000 0.500000 47.000000 821.789978 8.920000 0.950000 0.500000 48.000000 831.409973 8.920000 0.950000 0.500000 49.000000 841.039978 8.920000 0.960000 0.500000 50.000000 850.659973 8.930000 0.960000 0.500000 51.000000 860.280029 8.930000 0.970000 0.500000 52.000000 869.909973 8.930000 0.970000 0.500000 53.000000 879.530029 8.930000 0.980000 0.500000 54.000000 889.140015 8.940000 0.980000 0.500000 55.000000 898.760010 8.940000 0.990000 0.500000 56.000000 908.380005 8.940000 0.990000 0.500000 57.000000 917.989990 8.940000 0.990000 0.500000 58.000000 927.609985 8.950000 1.000000 0.500000 59.000000 937.219971 8.950000 1.000000 0.500000 60.000000 946.830017 8.950000 1.010000 0.500000 61.000000 956.450012 8.950000 
1.010000 0.500000 62.000000 966.059998 8.950000 1.020000 0.500000 63.000000 975.659973 8.960000 1.020000 0.500000 64.000000 985.270020 8.960000 1.030000 0.500000 65.000000 994.880005 8.960000 1.030000 0.500000 66.000000 1004.479980 8.960000 1.040000 0.500000 67.000000 1014.090027 8.960000 1.040000 0.500000 68.000000 1023.690002 8.970000 1.050000 0.500000 69.000000 1033.290039 8.970000 1.050000 0.500000 70.000000 1042.890015 8.970000 1.060000 0.500000 71.000000 1052.489990 8.970000 1.060000 0.500000 72.000000 1062.089966 8.970000 1.070000 0.500000 73.000000 1071.689941 8.970000 1.070000 0.500000 74.000000 1081.290039 8.970000 1.080000 0.500000 75.000000 1090.880005 8.980000 1.080000 0.500000 76.000000 1100.479980 8.980000 1.090000 0.500000 77.000000 1110.069946 8.980000 1.090000 0.500000 78.000000 1119.660034 8.980000 1.100000 0.500000 79.000000 1129.250000 8.980000 1.100000 0.500000 80.000000 1138.839966 8.980000 1.100000 0.500000 81.000000 1148.430054 8.980000 1.110000 0.500000 82.000000 1158.020020 8.980000 1.110000 0.500000 83.000000 1167.609985 8.980000 1.120000 0.500000 84.000000 1177.189941 8.980000 1.120000 0.500000 85.000000 1186.770020 8.990000 1.130000 0.500000 86.000000 1196.359985 8.990000 1.130000 0.500000 87.000000 1205.939941 8.990000 1.140000 0.500000 88.000000 1215.520020 8.990000 1.140000 0.500000 89.000000 1225.099976 8.990000 1.150000 0.500000 90.000000 1234.680054 8.990000 1.150000 0.500000 91.000000 1244.260010 8.990000 1.160000 0.500000 92.000000 1253.829956 8.990000 1.160000 0.500000 93.000000 1263.410034 8.990000 1.170000 0.500000 94.000000 1272.979980 8.990000 1.170000 0.500000 95.000000 1282.550049 8.990000 1.180000 0.500000 96.000000 1273.000000 9.180000 1.560000 0.700000 98.000000 1282.959961 9.200000 1.570000 0.700000 99.000000 1292.930054 9.220000 1.570000 0.700000 100.000000 1302.890015 9.240000 1.580000 0.700000 101.000000 1312.849976 9.260000 1.580000 0.700000 102.000000 1322.810059 9.280000 1.590000 0.700000 103.000000 1332.770020 9.300000 1.590000 0.700000 104.000000 1342.729980 9.320000 1.600000 0.700000 105.000000 1352.680054 9.340000 1.600000 0.700000 106.000000 1362.640015 9.360000 1.610000 0.700000 107.000000 1372.589966 9.370000 1.610000 0.700000 108.000000 1382.540039 9.390000 1.620000 0.700000 109.000000 1392.489990 9.410000 1.620000 0.700000 110.000000 1402.439941 9.430000 1.630000 0.700000 111.000000 1412.390015 9.440000 1.630000 0.700000 112.000000 1422.339966 9.460000 1.640000 0.700000 113.000000 1432.280029 9.470000 1.640000 0.700000 114.000000 1442.229980 9.490000 1.650000 0.700000 115.000000 1452.170044 9.510000 1.650000 0.700000 116.000000 1462.109985 9.520000 1.660000 0.700000 117.000000 1472.050049 9.540000 1.660000 0.700000 118.000000 1481.989990 9.550000 1.670000 0.700000 119.000000 1491.920044 9.570000 1.670000 0.700000 120.000000 1501.859985 9.580000 1.680000 0.700000 121.000000 1511.790039 9.590000 1.680000 0.700000 122.000000 1521.729980 9.610000 1.690000 0.700000 123.000000 1531.660034 9.620000 1.690000 0.700000 124.000000 1541.589966 9.630000 1.700000 0.700000 125.000000 1551.520020 9.650000 1.700000 0.700000 126.000000 1561.439941 9.660000 1.710000 0.700000 127.000000 1571.369995 9.670000 1.710000 0.700000 128.000000 1581.300049 9.680000 1.720000 0.700000 129.000000 1591.219971 9.700000 1.720000 0.700000 130.000000 1601.140015 9.710000 1.730000 0.700000 131.000000 1611.060059 9.720000 1.730000 0.700000 132.000000 1620.979980 9.730000 1.740000 0.700000 133.000000 1630.900024 9.740000 1.740000 0.700000 134.000000 1640.810059 
9.750000 1.750000 0.700000 135.000000 1650.729980 9.760000 1.750000 0.700000 136.000000 1660.640015 9.770000 1.760000 0.700000 137.000000 1670.560059 9.780000 1.770000 0.700000 138.000000 1680.469971 9.790000 1.770000 0.700000 139.000000 1690.380005 9.800000 1.780000 0.700000 140.000000 1700.280029 9.810000 1.780000 0.700000 141.000000 1710.189941 9.820000 1.790000 0.700000 142.000000 1720.099976 9.820000 1.790000 0.700000 143.000000 1730.000000 9.830000 1.800000 0.700000 144.000000 1739.900024 9.840000 1.800000 0.700000 145.000000 1749.810059 9.850000 1.810000 0.700000 146.000000 1759.709961 9.850000 1.810000 0.700000 147.000000 1769.599976 9.860000 1.820000 0.700000 148.000000 1779.500000 9.870000 1.820000 0.700000 149.000000 1789.400024 9.870000 1.830000 0.700000 150.000000 1799.290039 9.880000 1.830000 0.700000 151.000000 1809.189941 9.890000 1.840000 0.700000 152.000000 1819.079956 9.890000 1.840000 0.700000 153.000000 1828.969971 9.900000 1.850000 0.700000 154.000000 1838.859985 9.900000 1.850000 0.700000 155.000000 1848.750000 9.910000 1.860000 0.700000 156.000000 1858.630005 9.910000 1.860000 0.700000 157.000000 1868.520020 9.910000 1.870000 0.700000 158.000000 1878.400024 9.920000 1.870000 0.700000 159.000000 1888.280029 9.920000 1.880000 0.700000 160.000000 1883.239990 13.720000 2.250000 1.850000 162.000000 1893.250000 13.750000 2.250000 1.850000 163.000000 1903.260010 13.790000 2.260000 1.850000 164.000000 1913.260010 13.820000 2.260000 1.850000 165.000000 1923.270020 13.850000 2.270000 1.850000 166.000000 1933.270020 13.880000 2.280000 1.850000 167.000000 1943.270020 13.910000 2.280000 1.850000 168.000000 1953.260010 13.950000 2.290000 1.850000 169.000000 1963.250000 13.970000 2.290000 1.850000 170.000000 1973.239990 14.000000 2.300000 1.850000 171.000000 1983.229980 14.030000 2.310000 1.850000 172.000000 1993.219971 14.060000 2.310000 1.850000 173.000000 2003.199951 14.090000 2.320000 1.850000 174.000000 2013.180054 14.110000 2.320000 1.850000 175.000000 2023.160034 14.140000 2.330000 1.850000 176.000000 2033.130005 14.160000 2.340000 1.850000 177.000000 2043.099976 14.190000 2.340000 1.850000 178.000000 2053.070068 14.210000 2.350000 1.850000 179.000000 2063.040039 14.230000 2.350000 1.850000 180.000000 2073.000000 14.260000 2.360000 1.850000 181.000000 2082.969971 14.280000 2.370000 1.850000 182.000000 2092.919922 14.300000 2.370000 1.850000 183.000000 2102.879883 14.320000 2.380000 1.850000 184.000000 2112.830078 14.340000 2.380000 1.850000 185.000000 2122.780029 14.360000 2.390000 1.850000 186.000000 2132.729980 14.380000 2.400000 1.850000 187.000000 2142.679932 14.390000 2.400000 1.850000 188.000000 2152.620117 14.410000 2.410000 1.850000 189.000000 2162.560059 14.430000 2.410000 1.850000 190.000000 2172.500000 14.440000 2.420000 1.850000 191.000000 2182.429932 14.460000 2.430000 1.850000 192.000000 2192.370117 14.470000 2.430000 1.850000 193.000000 2202.300049 14.490000 2.440000 1.850000 194.000000 2212.219971 14.500000 2.440000 1.850000 195.000000 2222.149902 14.510000 2.450000 1.850000 196.000000 2232.070068 14.520000 2.460000 1.850000 197.000000 2241.989990 14.530000 2.460000 1.850000 198.000000 2251.899902 14.540000 2.470000 1.850000 199.000000 2261.820068 14.550000 2.470000 1.850000 200.000000 2271.729980 14.560000 2.480000 1.850000 201.000000 2281.639893 14.570000 2.490000 1.850000 202.000000 2291.540039 14.580000 2.490000 1.850000 203.000000 2301.449951 14.580000 2.500000 1.850000 204.000000 2311.350098 14.590000 2.500000 1.850000 205.000000 2321.250000 14.600000 
2.510000 1.850000 206.000000 2331.139893 14.600000 2.520000 1.850000 207.000000 2341.030029 14.610000 2.520000 1.850000 208.000000 2350.919922 14.610000 2.530000 1.850000 209.000000 2360.810059 14.610000 2.530000 1.850000 210.000000 2370.699951 14.610000 2.540000 1.850000 211.000000 2380.580078 14.610000 2.550000 1.850000 212.000000 2390.459961 14.620000 2.550000 1.850000 213.000000 2400.330078 14.620000 2.560000 1.850000 214.000000 2410.209961 14.620000 2.560000 1.850000 215.000000 2420.080078 14.610000 2.570000 1.850000 216.000000 2429.949951 14.610000 2.580000 1.850000 217.000000 2439.810059 14.610000 2.580000 1.850000 218.000000 2449.679932 14.610000 2.590000 1.850000 219.000000 2459.540039 14.600000 2.590000 1.850000 220.000000 2469.399902 14.600000 2.600000 1.850000 221.000000 2479.250000 14.590000 2.610000 1.850000 222.000000 2489.110107 14.590000 2.610000 1.850000 223.000000 2498.959961 14.580000 2.620000 1.850000 224.000000 spectral-0.22.4/spectral/tests/data/ecostress/000077500000000000000000000000001412674721200213245ustar00rootroot00000000000000spectral-0.22.4/spectral/tests/data/ecostress/a.spectrum.txt000066400000000000000000000243051412674721200241520ustar00rootroot00000000000000Name: Construction Concrete Type: manmade Class: Concrete Subclass: Construction Concrete Particle Size: Solid Sample No.: 0598UUUCNC Owner: National Photographic Interpretation Center Wavelength Range: All Origin: Spectra obtained from the Noncoventional Exploitation FactorsData System of the National Photographic Interpretation Center. Collection Date: N/A Description: Approximately 30-year-old runway construction concrete with (mostly granite) aggregate showing. Sample was light gray and weathered. Original ASTER Spectral Library name was jhu.becknic.manmade.concrete.construction.solid.0598uuu.spectrum.txt Measurement: Directional (10 Degree) Hemispherical Reflectance First Column: X Second Column: Y X Units: Wavelength (micrometers) Y Units:Reflectance (percent) First X Value: 0.3000 Last X Value: 15.0000 Number of X Values: 561 Additional Information: none 0.3000 8.8200 0.3020 9.0000 0.3040 9.1800 0.3060 9.3500 0.3080 9.5200 0.3100 9.6900 0.3120 9.8500 0.3140 10.0200 0.3160 10.1800 0.3180 10.3500 0.3200 10.5200 0.3220 10.6700 0.3240 10.8200 0.3260 10.9700 0.3280 11.1200 0.3300 11.2500 0.3320 11.3900 0.3340 11.5200 0.3360 11.6500 0.3380 11.7800 0.3400 11.9000 0.3420 12.0100 0.3440 12.1200 0.3460 12.2300 0.3480 12.3300 0.3500 12.4300 0.3520 12.5300 0.3540 12.6300 0.3560 12.7300 0.3580 12.8100 0.3600 12.9000 0.3620 12.9800 0.3640 13.0600 0.3660 13.1500 0.3680 13.2500 0.3700 13.3500 0.3720 13.4600 0.3740 13.5800 0.3760 13.7000 0.3780 13.8500 0.3800 14.0100 0.3820 14.1500 0.3840 14.3000 0.3860 14.4500 0.3880 14.6000 0.3900 14.7400 0.3920 14.8900 0.3940 15.0200 0.3960 15.1600 0.3980 15.3000 0.4000 15.4300 0.4020 15.5600 0.4040 15.6800 0.4060 15.8100 0.4080 15.9300 0.4100 16.0500 0.4120 16.1800 0.4140 16.3000 0.4160 16.4300 0.4180 16.5600 0.4200 16.6900 0.4220 16.8300 0.4240 16.9700 0.4260 17.1100 0.4280 17.2600 0.4300 17.4100 0.4320 17.5600 0.4340 17.7200 0.4360 17.8700 0.4380 18.0300 0.4400 18.1800 0.4420 18.3200 0.4440 18.4700 0.4460 18.6200 0.4480 18.7500 0.4500 18.8800 0.4520 19.0100 0.4540 19.1200 0.4560 19.2300 0.4580 19.3300 0.4600 19.4200 0.4620 19.5100 0.4640 19.6000 0.4660 19.7000 0.4680 19.7900 0.4700 19.8800 0.4720 19.9700 0.4740 20.0600 0.4760 20.1500 0.4780 20.2500 0.4800 20.3600 0.4820 20.4800 0.4840 20.6000 0.4860 20.7200 0.4880 20.8400 0.4900 20.9700 0.4920 21.0900 0.4940 21.2200 
0.4960 21.3400 0.4980 21.4600 0.5000 21.5900 0.5020 21.7200 0.5040 21.8500 0.5060 21.9800 0.5080 22.1200 0.5100 22.2600 0.5120 22.4100 0.5140 22.5500 0.5160 22.6900 0.5180 22.8400 0.5200 22.9900 0.5220 23.1400 0.5240 23.2900 0.5260 23.4500 0.5280 23.6000 0.5300 23.7500 0.5320 23.9100 0.5340 24.0700 0.5360 24.2300 0.5380 24.3800 0.5400 24.5400 0.5420 24.7100 0.5440 24.8800 0.5460 25.0400 0.5480 25.2100 0.5500 25.3800 0.5520 25.5600 0.5540 25.7300 0.5560 25.9100 0.5580 26.0800 0.5600 26.2600 0.5620 26.4400 0.5640 26.6100 0.5660 26.7700 0.5680 26.9400 0.5700 27.0900 0.5720 27.2500 0.5740 27.3900 0.5760 27.5300 0.5780 27.6600 0.5800 27.7900 0.5820 27.9100 0.5840 28.0300 0.5860 28.1400 0.5880 28.2500 0.5900 28.3400 0.5920 28.4400 0.5940 28.5300 0.5960 28.6200 0.5980 28.7000 0.6000 28.7700 0.6020 28.8400 0.6040 28.9100 0.6060 28.9700 0.6080 29.0300 0.6100 29.0900 0.6120 29.1400 0.6140 29.1900 0.6160 29.2400 0.6180 29.2800 0.6200 29.3300 0.6220 29.3700 0.6240 29.4100 0.6260 29.4400 0.6280 29.4900 0.6300 29.5200 0.6320 29.5600 0.6340 29.6000 0.6360 29.6300 0.6380 29.6700 0.6400 29.7000 0.6420 29.7300 0.6440 29.7600 0.6460 29.7900 0.6480 29.8300 0.6500 29.8600 0.6520 29.8900 0.6540 29.9300 0.6560 29.9600 0.6580 30.0000 0.6600 30.0300 0.6620 30.0700 0.6640 30.1000 0.6660 30.1300 0.6680 30.1700 0.6700 30.2000 0.6720 30.2300 0.6740 30.2800 0.6760 30.3100 0.6780 30.3300 0.6800 30.3600 0.6820 30.3900 0.6840 30.4200 0.6860 30.4500 0.6880 30.4700 0.6900 30.5000 0.6920 30.5200 0.6940 30.5500 0.6960 30.5800 0.6980 30.6100 0.7000 30.6400 0.7020 30.6700 0.7040 30.6900 0.7060 30.7100 0.7080 30.7400 0.7100 30.7700 0.7120 30.7900 0.7140 30.8100 0.7160 30.8300 0.7180 30.8600 0.7200 30.8700 0.7220 30.8900 0.7240 30.9200 0.7260 30.9500 0.7280 30.9600 0.7300 30.9800 0.7320 31.0000 0.7340 31.0200 0.7360 31.0400 0.7380 31.0700 0.7400 31.0800 0.7420 31.0900 0.7440 31.1200 0.7460 31.1400 0.7480 31.1500 0.7500 31.1700 0.7520 31.1800 0.7540 31.1900 0.7560 31.2000 0.7580 31.2100 0.7600 31.2200 0.7620 31.2400 0.7640 31.2600 0.7660 31.2700 0.7680 31.2700 0.7700 31.2800 0.7720 31.3000 0.7740 31.3100 0.7760 31.3200 0.7780 31.3500 0.7800 31.3500 0.7820 31.3600 0.7840 31.3700 0.7860 31.3800 0.7880 31.3800 0.7900 31.3900 0.7920 31.4100 0.7940 31.4000 0.7960 31.4100 0.7980 31.4200 0.8000 31.4200 0.8200 31.4700 0.8400 31.5200 0.8600 31.6000 0.8800 31.8500 0.9000 32.0900 0.9200 32.2000 0.9400 32.4300 0.9600 32.4800 0.9800 32.6200 1.0000 32.8000 1.0200 33.0000 1.0400 33.1800 1.0600 33.4000 1.0800 33.6100 1.1000 33.8500 1.1200 34.0800 1.1400 34.2800 1.1600 34.4600 1.1800 34.6900 1.2000 34.8600 1.2200 35.0500 1.2400 35.2600 1.2600 35.4300 1.2800 35.6000 1.3000 35.7500 1.3200 35.9300 1.3400 36.0900 1.3600 36.2600 1.3800 36.2000 1.4000 35.8200 1.4200 35.5700 1.4400 35.9600 1.4600 36.3500 1.4800 36.7100 1.5000 37.1200 1.5200 37.5000 1.5400 37.8400 1.5600 38.1700 1.5800 38.4600 1.6000 38.7700 1.6200 39.0800 1.6400 39.3800 1.6600 39.6700 1.6800 39.8600 1.7000 40.0400 1.7200 40.1800 1.7400 40.3000 1.7600 40.3800 1.7800 40.5700 1.8000 40.7400 1.8200 40.8300 1.8400 40.9400 1.8600 40.7600 1.8800 39.5400 1.9000 36.3100 1.9200 34.2600 1.9400 34.7000 1.9600 35.5700 1.9800 36.4700 2.0000 37.4600 2.0200 38.2800 2.0400 38.8000 2.0600 39.0700 2.0800 39.3200 2.1000 39.5300 2.1200 39.7700 2.1400 39.7900 2.1600 39.4300 2.1800 38.9100 2.2000 38.2100 2.2200 37.7000 2.2400 37.3500 2.2600 36.7416 2.2800 36.2750 2.3000 35.9044 2.3200 35.7352 2.3400 34.9660 2.3600 34.8206 2.3800 34.5136 2.4000 33.5790 2.4200 32.6856 2.4400 31.5692 2.4600 30.6156 2.4800 29.8670 
2.5000 29.6644 2.5200 29.0974 2.5400 28.7850 2.5600 28.8634 2.5800 28.6764 2.6000 28.4702 2.6200 27.8358 2.6400 27.0508 2.6600 25.5036 2.6800 21.4844 2.7000 14.3030 2.7200 9.4060 2.7400 6.3469 2.7600 5.0405 2.7800 4.8091 2.8000 4.8816 2.8200 4.9397 2.8400 4.9237 2.8600 4.9307 2.8800 4.8603 2.9000 4.8560 2.9200 4.6667 2.9400 4.6345 2.9600 4.8271 2.9800 4.9149 3.0000 5.1771 3.0200 5.1980 3.0400 5.0375 3.0600 5.1322 3.0800 5.2523 3.1000 5.3455 3.1200 5.5052 3.1400 5.5437 3.1600 5.5737 3.1800 6.0704 3.2000 6.4096 3.2200 6.5711 3.2400 6.6443 3.2600 7.0590 3.2800 7.3543 3.3000 7.2461 3.3200 7.5285 3.3400 7.4249 3.3600 7.4544 3.3800 7.2683 3.4000 7.1317 3.4200 7.3650 3.4400 7.4940 3.4600 7.8789 3.4800 7.8982 3.5000 8.3130 3.5200 9.1383 3.5400 10.0563 3.5600 10.6520 3.5800 10.9424 3.6000 11.2782 3.6200 11.5528 3.6400 11.9650 3.6600 12.1872 3.6800 12.3642 3.7000 12.4430 3.7200 12.6542 3.7400 12.8210 3.7600 12.7664 3.7800 12.7516 3.8000 12.4942 3.8200 12.1188 3.8400 11.7864 3.8600 11.5824 3.8800 11.5528 3.9000 11.5060 3.9200 11.2302 3.9400 10.6047 3.9600 9.6341 3.9800 9.3752 4.0000 10.2796 4.0200 11.7794 4.0400 12.8064 4.0600 13.5042 4.0800 13.7910 4.1000 13.9234 4.1200 13.9440 4.1400 13.9476 4.1600 13.8720 4.1800 13.8416 4.2000 13.6742 4.2200 13.5838 4.2400 13.4360 4.2600 13.2908 4.2800 13.1508 4.3000 13.0164 4.3200 12.9320 4.3400 12.7978 4.3600 12.5926 4.3800 12.2080 4.4000 11.9764 4.4200 11.6680 4.4400 11.4110 4.4600 11.2896 4.4800 11.1362 4.5000 11.0506 4.5200 11.1932 4.5400 11.3686 4.5600 11.3584 4.5800 11.2912 4.6000 10.9852 4.6200 10.8236 4.6400 10.6282 4.6600 10.2838 4.6800 10.1442 4.7000 10.2800 4.7200 10.4694 4.7400 10.4400 4.7600 10.3384 4.7800 10.2318 4.8000 10.1496 4.8200 10.0226 4.8400 9.7699 4.8600 9.6050 4.8800 9.1652 4.9000 8.8878 4.9200 8.5122 4.9400 8.3623 4.9600 8.2440 4.9800 8.0427 5.0000 7.9500 5.1000 7.5539 5.2000 7.0983 5.3000 6.3350 5.4000 5.9901 5.5000 5.1563 5.6000 4.3943 5.7000 4.5005 5.8000 3.7757 5.9000 2.8825 6.0000 2.1790 6.1000 2.0061 6.2000 2.2873 6.3000 2.9338 6.4000 3.8997 6.5000 3.9615 6.6000 3.5872 6.7000 3.1411 6.8000 3.0410 6.9000 2.9174 7.0000 2.9403 7.1000 2.8419 7.2000 3.0369 7.3000 3.6433 7.4000 4.0744 7.5000 4.3151 7.6000 4.3834 7.7000 4.6854 7.8000 4.9981 7.9000 5.9799 8.0000 7.9769 8.1000 10.6997 8.2000 13.6180 8.3000 14.0769 8.4000 14.0894 8.5000 14.0812 8.6000 12.1757 8.7000 13.1917 8.8000 14.1508 8.9000 14.2597 9.0000 14.2244 9.1000 14.7463 9.2000 15.2267 9.3000 15.3502 9.4000 13.9308 9.5000 12.5550 9.6000 11.8706 9.7000 11.0620 9.8000 10.2582 9.9000 9.7024 10.0000 8.8807 10.1000 8.2421 10.2000 7.2627 10.3000 6.4682 10.4000 5.8536 10.5000 5.3009 10.6000 4.8996 10.7000 4.6324 10.8000 4.4746 10.9000 4.4967 11.0000 4.4709 11.1000 4.3983 11.2000 4.3711 11.3000 4.2910 11.4000 4.1692 11.5000 4.1352 11.6000 3.7895 11.7000 3.5576 11.8000 3.3700 11.9000 3.2080 12.0000 2.9953 12.1000 2.8058 12.2000 2.5455 12.3000 2.3216 12.4000 2.9269 12.5000 3.6709 12.6000 3.5550 12.7000 3.6849 12.8000 4.2039 12.9000 4.0422 13.0000 3.6625 13.1000 3.4205 13.2000 3.2138 13.3000 2.9777 13.4000 2.8796 13.5000 2.7790 13.6000 2.7513 13.7000 2.7299 13.8000 2.7308 13.9000 2.6318 14.0000 2.5303 14.1000 2.4584 14.2000 2.2902 14.3000 2.1323 14.4000 2.4286 14.5000 2.6706 14.6000 2.5285 14.7000 2.3044 14.8000 1.7773 14.9000 2.2710 15.0000 2.7210 spectral-0.22.4/spectral/tests/data/ecostress/b.spectrum.txt000066400000000000000000001107171412674721200241560ustar00rootroot00000000000000Name: lichen off trees Type: non photosynthetic vegetation Class: lichen Genus: Lichen Species: species 
Sample No.: VH297 Owner: UCSB Wavelength Range: VSWIR Origin: 37.232539; -119.233498; WGS84 Collection Date: 5/10/2014 Description: Samples were collected as part of the HyspIRI Airborne Campaign proposal titled: HyspIRI discrimination of plant species and functional types along a strong environmental temperature gradient. The same materials were processed in the Nicolet and then measured using the ASD. Measurement: Bidirectional reflectance First Column: X Second Column: Y X Units: Wavelength (micrometer) Y Units: Reflectance (percentage) First X Value: 0.35 Last X Value: 2.5 Number of X Values: 2151 Additional Information: 0.3500 1.4710 0.3510 1.0620 0.3520 1.0810 0.3530 1.3340 0.3540 1.4440 0.3550 1.0150 0.3560 0.9690 0.3570 1.2420 0.3580 1.2790 0.3590 1.1910 0.3600 1.1250 0.3610 1.1410 0.3620 1.1870 0.3630 1.0180 0.3640 0.9660 0.3650 1.2720 0.3660 1.3070 0.3670 1.1940 0.3680 1.0820 0.3690 1.1130 0.3700 1.1600 0.3710 1.2430 0.3720 1.2990 0.3730 1.0630 0.3740 1.0670 0.3750 1.2120 0.3760 1.2460 0.3770 1.2460 0.3780 1.2080 0.3790 1.1530 0.3800 1.1320 0.3810 1.1840 0.3820 1.1860 0.3830 1.0890 0.3840 1.0540 0.3850 1.1620 0.3860 1.2400 0.3870 1.1050 0.3880 1.1330 0.3890 1.1060 0.3900 1.0350 0.3910 1.1860 0.3920 1.2290 0.3930 1.1730 0.3940 1.1100 0.3950 1.1710 0.3960 1.2230 0.3970 1.2090 0.3980 1.1370 0.3990 1.1350 0.4000 1.1740 0.4010 1.2110 0.4020 1.2120 0.4030 1.2250 0.4040 1.2280 0.4050 1.2010 0.4060 1.1940 0.4070 1.1680 0.4080 1.1450 0.4090 1.1720 0.4100 1.1870 0.4110 1.1860 0.4120 1.1830 0.4130 1.2170 0.4140 1.2300 0.4150 1.2390 0.4160 1.2640 0.4170 1.2300 0.4180 1.2110 0.4190 1.2400 0.4200 1.3060 0.4210 1.2660 0.4220 1.2060 0.4230 1.2130 0.4240 1.2510 0.4250 1.2740 0.4260 1.2680 0.4270 1.2490 0.4280 1.2580 0.4290 1.2770 0.4300 1.2950 0.4310 1.3120 0.4320 1.3350 0.4330 1.3550 0.4340 1.3610 0.4350 1.3630 0.4360 1.3810 0.4370 1.4150 0.4380 1.4500 0.4390 1.4560 0.4400 1.4650 0.4410 1.4950 0.4420 1.5090 0.4430 1.5480 0.4440 1.6010 0.4450 1.6440 0.4460 1.6730 0.4470 1.7150 0.4480 1.7710 0.4490 1.8150 0.4500 1.9030 0.4510 2.0010 0.4520 2.0770 0.4530 2.1590 0.4540 2.2540 0.4550 2.3620 0.4560 2.4760 0.4570 2.6010 0.4580 2.7370 0.4590 2.8860 0.4600 3.0640 0.4610 3.2380 0.4620 3.4120 0.4630 3.5980 0.4640 3.7890 0.4650 3.9850 0.4660 4.1970 0.4670 4.4400 0.4680 4.6750 0.4690 4.9070 0.4700 5.1470 0.4710 5.3950 0.4720 5.6430 0.4730 5.8920 0.4740 6.1490 0.4750 6.4150 0.4760 6.6930 0.4770 6.9800 0.4780 7.2530 0.4790 7.5070 0.4800 7.7720 0.4810 8.0720 0.4820 8.3590 0.4830 8.6270 0.4840 8.8880 0.4850 9.1640 0.4860 9.4210 0.4870 9.6610 0.4880 9.8940 0.4890 10.1350 0.4900 10.3510 0.4910 10.5470 0.4920 10.7410 0.4930 10.9410 0.4940 11.1140 0.4950 11.2580 0.4960 11.4130 0.4970 11.5580 0.4980 11.6930 0.4990 11.8220 0.5000 11.9470 0.5010 12.0590 0.5020 12.1630 0.5030 12.2740 0.5040 12.3650 0.5050 12.4490 0.5060 12.5390 0.5070 12.6300 0.5080 12.7130 0.5090 12.7940 0.5100 12.8810 0.5110 12.9630 0.5120 13.0420 0.5130 13.1220 0.5140 13.2090 0.5150 13.2810 0.5160 13.3530 0.5170 13.4410 0.5180 13.5300 0.5190 13.6130 0.5200 13.6920 0.5210 13.7710 0.5220 13.8470 0.5230 13.9230 0.5240 14.0020 0.5250 14.0760 0.5260 14.1480 0.5270 14.2180 0.5280 14.2840 0.5290 14.3420 0.5300 14.3990 0.5310 14.4550 0.5320 14.4970 0.5330 14.5370 0.5340 14.5800 0.5350 14.6240 0.5360 14.6530 0.5370 14.6990 0.5380 14.7610 0.5390 14.8150 0.5400 14.8930 0.5410 14.9950 0.5420 15.1100 0.5430 15.2110 0.5440 15.2970 0.5450 15.3700 0.5460 15.4320 0.5470 15.4870 0.5480 15.5460 0.5490 15.6120 0.5500 15.6860 0.5510 15.7630 0.5520 15.8350 0.5530 15.8960 
0.5540 15.9550 0.5550 16.0180 0.5560 16.0830 0.5570 16.1460 0.5580 16.2090 0.5590 16.2690 0.5600 16.3280 0.5610 16.3890 0.5620 16.4490 0.5630 16.5020 0.5640 16.5460 0.5650 16.5920 0.5660 16.6340 0.5670 16.6710 0.5680 16.7150 0.5690 16.7500 0.5700 16.7840 0.5710 16.8220 0.5720 16.8630 0.5730 16.9060 0.5740 16.9490 0.5750 16.9920 0.5760 17.0280 0.5770 17.0700 0.5780 17.1210 0.5790 17.1720 0.5800 17.2190 0.5810 17.2590 0.5820 17.2910 0.5830 17.3190 0.5840 17.3430 0.5850 17.3640 0.5860 17.3860 0.5870 17.4140 0.5880 17.4350 0.5890 17.4360 0.5900 17.4240 0.5910 17.4170 0.5920 17.4210 0.5930 17.4330 0.5940 17.4470 0.5950 17.4510 0.5960 17.4440 0.5970 17.4460 0.5980 17.4440 0.5990 17.4340 0.6000 17.4190 0.6010 17.4030 0.6020 17.3810 0.6030 17.3540 0.6040 17.3310 0.6050 17.3120 0.6060 17.2930 0.6070 17.2730 0.6080 17.2540 0.6090 17.2550 0.6100 17.2820 0.6110 17.3290 0.6120 17.3250 0.6130 17.2980 0.6140 17.2760 0.6150 17.2760 0.6160 17.2850 0.6170 17.2990 0.6180 17.3190 0.6190 17.3460 0.6200 17.3730 0.6210 17.4030 0.6220 17.4430 0.6230 17.4770 0.6240 17.5090 0.6250 17.5430 0.6260 17.5790 0.6270 17.6150 0.6280 17.6480 0.6290 17.6750 0.6300 17.7100 0.6310 17.7390 0.6320 17.7550 0.6330 17.7600 0.6340 17.7650 0.6350 17.7670 0.6360 17.7620 0.6370 17.7540 0.6380 17.7330 0.6390 17.7020 0.6400 17.6660 0.6410 17.6280 0.6420 17.5880 0.6430 17.5480 0.6440 17.5060 0.6450 17.4650 0.6460 17.4250 0.6470 17.3910 0.6480 17.3600 0.6490 17.3320 0.6500 17.3080 0.6510 17.2900 0.6520 17.2720 0.6530 17.2480 0.6540 17.2190 0.6550 17.1970 0.6560 17.1650 0.6570 17.1200 0.6580 17.0690 0.6590 17.0200 0.6600 16.9700 0.6610 16.9160 0.6620 16.8610 0.6630 16.8020 0.6640 16.7490 0.6650 16.7040 0.6660 16.6600 0.6670 16.6250 0.6680 16.5980 0.6690 16.5720 0.6700 16.5610 0.6710 16.5570 0.6720 16.5570 0.6730 16.5610 0.6740 16.5780 0.6750 16.6040 0.6760 16.6310 0.6770 16.6660 0.6780 16.7100 0.6790 16.7610 0.6800 16.8210 0.6810 16.8870 0.6820 16.9670 0.6830 17.0610 0.6840 17.1610 0.6850 17.2760 0.6860 17.4110 0.6870 17.5680 0.6880 17.7470 0.6890 17.9510 0.6900 18.1800 0.6910 18.4310 0.6920 18.7070 0.6930 19.0060 0.6940 19.3230 0.6950 19.6420 0.6960 19.9720 0.6970 20.3100 0.6980 20.6490 0.6990 20.9830 0.7000 21.3190 0.7010 21.6530 0.7020 21.9780 0.7030 22.3040 0.7040 22.6280 0.7050 22.9470 0.7060 23.2640 0.7070 23.5770 0.7080 23.8890 0.7090 24.2020 0.7100 24.5090 0.7110 24.8170 0.7120 25.1260 0.7130 25.4290 0.7140 25.7350 0.7150 26.0370 0.7160 26.3310 0.7170 26.6280 0.7180 26.9210 0.7190 27.2140 0.7200 27.5080 0.7210 27.7880 0.7220 28.0620 0.7230 28.3320 0.7240 28.6000 0.7250 28.8670 0.7260 29.1240 0.7270 29.3660 0.7280 29.6060 0.7290 29.8390 0.7300 30.0660 0.7310 30.2830 0.7320 30.4870 0.7330 30.6840 0.7340 30.8780 0.7350 31.0700 0.7360 31.2480 0.7370 31.4200 0.7380 31.5890 0.7390 31.7430 0.7400 31.8940 0.7410 32.0440 0.7420 32.1910 0.7430 32.3310 0.7440 32.4670 0.7450 32.6000 0.7460 32.7270 0.7470 32.8510 0.7480 32.9740 0.7490 33.0980 0.7500 33.2250 0.7510 33.3420 0.7520 33.4540 0.7530 33.5710 0.7540 33.6890 0.7550 33.8030 0.7560 33.9110 0.7570 34.0180 0.7580 34.1240 0.7590 34.2300 0.7600 34.3360 0.7610 34.4340 0.7620 34.5290 0.7630 34.6230 0.7640 34.7140 0.7650 34.8000 0.7660 34.8850 0.7670 34.9690 0.7680 35.0530 0.7690 35.1210 0.7700 35.1920 0.7710 35.2690 0.7720 35.3340 0.7730 35.3980 0.7740 35.4620 0.7750 35.5260 0.7760 35.5850 0.7770 35.6430 0.7780 35.7020 0.7790 35.7600 0.7800 35.8140 0.7810 35.8680 0.7820 35.9210 0.7830 35.9700 0.7840 36.0170 0.7850 36.0660 0.7860 36.1160 0.7870 36.1670 0.7880 36.2140 0.7890 36.2590 0.7900 36.3070 
0.7910 36.3530 0.7920 36.3990 0.7930 36.4440 0.7940 36.4890 0.7950 36.5340 0.7960 36.5790 0.7970 36.6250 0.7980 36.6750 0.7990 36.7180 0.8000 36.7580 0.8010 36.8010 0.8020 36.8510 0.8030 36.9010 0.8040 36.9460 0.8050 36.9840 0.8060 37.0270 0.8070 37.0740 0.8080 37.1220 0.8090 37.1740 0.8100 37.2200 0.8110 37.2650 0.8120 37.3110 0.8130 37.3600 0.8140 37.4100 0.8150 37.4570 0.8160 37.5000 0.8170 37.5480 0.8180 37.5940 0.8190 37.6410 0.8200 37.6960 0.8210 37.7480 0.8220 37.7950 0.8230 37.8400 0.8240 37.8920 0.8250 37.9440 0.8260 37.9950 0.8270 38.0470 0.8280 38.1010 0.8290 38.1550 0.8300 38.2030 0.8310 38.2420 0.8320 38.2960 0.8330 38.3490 0.8340 38.3990 0.8350 38.4460 0.8360 38.4960 0.8370 38.5470 0.8380 38.5980 0.8390 38.6490 0.8400 38.6940 0.8410 38.7380 0.8420 38.7830 0.8430 38.8360 0.8440 38.8820 0.8450 38.9320 0.8460 38.9870 0.8470 39.0320 0.8480 39.0720 0.8490 39.1130 0.8500 39.1560 0.8510 39.2070 0.8520 39.2530 0.8530 39.2980 0.8540 39.3490 0.8550 39.3900 0.8560 39.4240 0.8570 39.4600 0.8580 39.5030 0.8590 39.5440 0.8600 39.5810 0.8610 39.6160 0.8620 39.6500 0.8630 39.6900 0.8640 39.7280 0.8650 39.7570 0.8660 39.8020 0.8670 39.8430 0.8680 39.8740 0.8690 39.8980 0.8700 39.9360 0.8710 39.9670 0.8720 39.9930 0.8730 40.0250 0.8740 40.0560 0.8750 40.0880 0.8760 40.1190 0.8770 40.1460 0.8780 40.1790 0.8790 40.2060 0.8800 40.2250 0.8810 40.2500 0.8820 40.2790 0.8830 40.3060 0.8840 40.3310 0.8850 40.3610 0.8860 40.3820 0.8870 40.4010 0.8880 40.4220 0.8890 40.4440 0.8900 40.4690 0.8910 40.4920 0.8920 40.5080 0.8930 40.5210 0.8940 40.5440 0.8950 40.5700 0.8960 40.5840 0.8970 40.5970 0.8980 40.6130 0.8990 40.6350 0.9000 40.6610 0.9010 40.6740 0.9020 40.6830 0.9030 40.7010 0.9040 40.7330 0.9050 40.7470 0.9060 40.7550 0.9070 40.7690 0.9080 40.7850 0.9090 40.8050 0.9100 40.8220 0.9110 40.8320 0.9120 40.8440 0.9130 40.8640 0.9140 40.8840 0.9150 40.8990 0.9160 40.9170 0.9170 40.9360 0.9180 40.9610 0.9190 40.9910 0.9200 41.0100 0.9210 41.0210 0.9220 41.0370 0.9230 41.0640 0.9240 41.0900 0.9250 41.1160 0.9260 41.1420 0.9270 41.1650 0.9280 41.1760 0.9290 41.1950 0.9300 41.2240 0.9310 41.2500 0.9320 41.2680 0.9330 41.2920 0.9340 41.3230 0.9350 41.3450 0.9360 41.3660 0.9370 41.3900 0.9380 41.4170 0.9390 41.4400 0.9400 41.4570 0.9410 41.4830 0.9420 41.5190 0.9430 41.5400 0.9440 41.5590 0.9450 41.5740 0.9460 41.5900 0.9470 41.6240 0.9480 41.6480 0.9490 41.6750 0.9500 41.7140 0.9510 41.7410 0.9520 41.7500 0.9530 41.7590 0.9540 41.7820 0.9550 41.8050 0.9560 41.8270 0.9570 41.8420 0.9580 41.8470 0.9590 41.8740 0.9600 41.8900 0.9610 41.9050 0.9620 41.9350 0.9630 41.9480 0.9640 41.9630 0.9650 41.9740 0.9660 41.9710 0.9670 41.9830 0.9680 41.9980 0.9690 42.0190 0.9700 42.0440 0.9710 42.0510 0.9720 42.0350 0.9730 42.0230 0.9740 42.0430 0.9750 42.0630 0.9760 42.0650 0.9770 42.0530 0.9780 42.0490 0.9790 42.0840 0.9800 42.1090 0.9810 42.1150 0.9820 42.1180 0.9830 42.1140 0.9840 42.1320 0.9850 42.1590 0.9860 42.1730 0.9870 42.2080 0.9880 42.2340 0.9890 42.2530 0.9900 42.2830 0.9910 42.2970 0.9920 42.3170 0.9930 42.3370 0.9940 42.3420 0.9950 42.3550 0.9960 42.3950 0.9970 42.4450 0.9980 42.4790 0.9990 42.5030 1.0000 42.5710 1.0010 42.6480 1.0020 42.7170 1.0030 42.7560 1.0040 42.7980 1.0050 42.8410 1.0060 42.8670 1.0070 42.8840 1.0080 42.9140 1.0090 42.9400 1.0100 42.9760 1.0110 43.0310 1.0120 43.0730 1.0130 43.1120 1.0140 43.1490 1.0150 43.1830 1.0160 43.2130 1.0170 43.2440 1.0180 43.2670 1.0190 43.2960 1.0200 43.3330 1.0210 43.3620 1.0220 43.3980 1.0230 43.4320 1.0240 43.4650 1.0250 43.4940 1.0260 43.5240 1.0270 43.5500 
1.0280 43.5660 1.0290 43.5910 1.0300 43.6210 1.0310 43.6490 1.0320 43.6780 1.0330 43.7100 1.0340 43.7370 1.0350 43.7650 1.0360 43.8040 1.0370 43.8400 1.0380 43.8690 1.0390 43.8970 1.0400 43.9250 1.0410 43.9440 1.0420 43.9690 1.0430 44.0010 1.0440 44.0270 1.0450 44.0480 1.0460 44.0720 1.0470 44.0930 1.0480 44.1170 1.0490 44.1440 1.0500 44.1700 1.0510 44.1960 1.0520 44.2280 1.0530 44.2550 1.0540 44.2830 1.0550 44.3080 1.0560 44.3310 1.0570 44.3570 1.0580 44.3860 1.0590 44.4100 1.0600 44.4370 1.0610 44.4600 1.0620 44.4730 1.0630 44.4970 1.0640 44.5270 1.0650 44.5520 1.0660 44.5850 1.0670 44.6090 1.0680 44.6280 1.0690 44.6370 1.0700 44.6580 1.0710 44.6780 1.0720 44.6990 1.0730 44.7240 1.0740 44.7480 1.0750 44.7690 1.0760 44.7930 1.0770 44.8180 1.0780 44.8450 1.0790 44.8740 1.0800 44.9000 1.0810 44.9280 1.0820 44.9540 1.0830 44.9760 1.0840 44.9990 1.0850 45.0230 1.0860 45.0520 1.0870 45.0720 1.0880 45.0870 1.0890 45.1090 1.0900 45.1330 1.0910 45.1530 1.0920 45.1760 1.0930 45.1970 1.0940 45.2090 1.0950 45.2290 1.0960 45.2570 1.0970 45.2810 1.0980 45.3050 1.0990 45.3280 1.1000 45.3480 1.1010 45.3680 1.1020 45.3870 1.1030 45.4040 1.1040 45.4240 1.1050 45.4420 1.1060 45.4620 1.1070 45.4880 1.1080 45.5040 1.1090 45.5140 1.1100 45.5300 1.1110 45.5410 1.1120 45.5540 1.1130 45.5730 1.1140 45.5890 1.1150 45.6030 1.1160 45.6140 1.1170 45.6230 1.1180 45.6320 1.1190 45.6390 1.1200 45.6480 1.1210 45.6560 1.1220 45.6670 1.1230 45.6760 1.1240 45.6800 1.1250 45.6870 1.1260 45.6970 1.1270 45.6980 1.1280 45.6970 1.1290 45.6900 1.1300 45.6740 1.1310 45.6610 1.1320 45.6440 1.1330 45.6320 1.1340 45.6250 1.1350 45.6150 1.1360 45.6070 1.1370 45.5980 1.1380 45.5890 1.1390 45.5780 1.1400 45.5740 1.1410 45.5730 1.1420 45.5710 1.1430 45.5690 1.1440 45.5640 1.1450 45.5540 1.1460 45.5480 1.1470 45.5440 1.1480 45.5360 1.1490 45.5280 1.1500 45.5200 1.1510 45.5130 1.1520 45.5010 1.1530 45.4850 1.1540 45.4690 1.1550 45.4450 1.1560 45.4260 1.1570 45.4050 1.1580 45.3890 1.1590 45.3710 1.1600 45.3440 1.1610 45.3140 1.1620 45.2880 1.1630 45.2670 1.1640 45.2470 1.1650 45.2280 1.1660 45.2150 1.1670 45.1950 1.1680 45.1730 1.1690 45.1500 1.1700 45.1250 1.1710 45.1030 1.1720 45.0810 1.1730 45.0560 1.1740 45.0310 1.1750 45.0070 1.1760 44.9780 1.1770 44.9500 1.1780 44.9270 1.1790 44.9000 1.1800 44.8730 1.1810 44.8460 1.1820 44.8140 1.1830 44.7820 1.1840 44.7510 1.1850 44.7200 1.1860 44.6920 1.1870 44.6680 1.1880 44.6450 1.1890 44.6250 1.1900 44.6060 1.1910 44.5880 1.1920 44.5720 1.1930 44.5560 1.1940 44.5430 1.1950 44.5320 1.1960 44.5210 1.1970 44.5080 1.1980 44.5010 1.1990 44.4970 1.2000 44.4930 1.2010 44.4980 1.2020 44.5030 1.2030 44.5050 1.2040 44.5130 1.2050 44.5210 1.2060 44.5340 1.2070 44.5510 1.2080 44.5640 1.2090 44.5830 1.2100 44.6060 1.2110 44.6280 1.2120 44.6500 1.2130 44.6810 1.2140 44.7140 1.2150 44.7470 1.2160 44.7870 1.2170 44.8270 1.2180 44.8660 1.2190 44.9100 1.2200 44.9530 1.2210 44.9990 1.2220 45.0420 1.2230 45.0840 1.2240 45.1290 1.2250 45.1710 1.2260 45.2180 1.2270 45.2670 1.2280 45.3150 1.2290 45.3660 1.2300 45.4120 1.2310 45.4510 1.2320 45.4910 1.2330 45.5340 1.2340 45.5790 1.2350 45.6300 1.2360 45.6780 1.2370 45.7260 1.2380 45.7700 1.2390 45.8100 1.2400 45.8520 1.2410 45.8960 1.2420 45.9360 1.2430 45.9770 1.2440 46.0130 1.2450 46.0500 1.2460 46.0850 1.2470 46.1230 1.2480 46.1630 1.2490 46.1990 1.2500 46.2350 1.2510 46.2700 1.2520 46.3020 1.2530 46.3360 1.2540 46.3680 1.2550 46.4030 1.2560 46.4370 1.2570 46.4670 1.2580 46.5010 1.2590 46.5310 1.2600 46.5570 1.2610 46.5860 1.2620 46.6160 1.2630 46.6430 1.2640 46.6680 
1.2650 46.6930 1.2660 46.7180 1.2670 46.7410 1.2680 46.7690 1.2690 46.7970 1.2700 46.8230 1.2710 46.8500 1.2720 46.8760 1.2730 46.9040 1.2740 46.9330 1.2750 46.9580 1.2760 46.9790 1.2770 47.0030 1.2780 47.0260 1.2790 47.0490 1.2800 47.0790 1.2810 47.1100 1.2820 47.1380 1.2830 47.1680 1.2840 47.2030 1.2850 47.2350 1.2860 47.2590 1.2870 47.2890 1.2880 47.3230 1.2890 47.3490 1.2900 47.3740 1.2910 47.4100 1.2920 47.4380 1.2930 47.4660 1.2940 47.4970 1.2950 47.5220 1.2960 47.5470 1.2970 47.5720 1.2980 47.6010 1.2990 47.6250 1.3000 47.6460 1.3010 47.6660 1.3020 47.6800 1.3030 47.6950 1.3040 47.7130 1.3050 47.7290 1.3060 47.7430 1.3070 47.7590 1.3080 47.7760 1.3090 47.7870 1.3100 47.7950 1.3110 47.8030 1.3120 47.8120 1.3130 47.8230 1.3140 47.8300 1.3150 47.8370 1.3160 47.8380 1.3170 47.8330 1.3180 47.8270 1.3190 47.8220 1.3200 47.8140 1.3210 47.8030 1.3220 47.7910 1.3230 47.7750 1.3240 47.7570 1.3250 47.7420 1.3260 47.7230 1.3270 47.7030 1.3280 47.6810 1.3290 47.6540 1.3300 47.6250 1.3310 47.5950 1.3320 47.5660 1.3330 47.5320 1.3340 47.4950 1.3350 47.4580 1.3360 47.4120 1.3370 47.3660 1.3380 47.3140 1.3390 47.2580 1.3400 47.2010 1.3410 47.1420 1.3420 47.0840 1.3430 47.0230 1.3440 46.9620 1.3450 46.9010 1.3460 46.8410 1.3470 46.7740 1.3480 46.7030 1.3490 46.6320 1.3500 46.5550 1.3510 46.4800 1.3520 46.4100 1.3530 46.3430 1.3540 46.2730 1.3550 46.2050 1.3560 46.1360 1.3570 46.0650 1.3580 45.9980 1.3590 45.9340 1.3600 45.8700 1.3610 45.8120 1.3620 45.7570 1.3630 45.7030 1.3640 45.6510 1.3650 45.5970 1.3660 45.5460 1.3670 45.4950 1.3680 45.4430 1.3690 45.3970 1.3700 45.3490 1.3710 45.3020 1.3720 45.2600 1.3730 45.2180 1.3740 45.1810 1.3750 45.1420 1.3760 45.1000 1.3770 45.0560 1.3780 45.0060 1.3790 44.9560 1.3800 44.9040 1.3810 44.8480 1.3820 44.7900 1.3830 44.7300 1.3840 44.6620 1.3850 44.5900 1.3860 44.5110 1.3870 44.4260 1.3880 44.3350 1.3890 44.2380 1.3900 44.1360 1.3910 44.0280 1.3920 43.9140 1.3930 43.7900 1.3940 43.6580 1.3950 43.5180 1.3960 43.3670 1.3970 43.2080 1.3980 43.0400 1.3990 42.8600 1.4000 42.6700 1.4010 42.4700 1.4020 42.2570 1.4030 42.0290 1.4040 41.7920 1.4050 41.5450 1.4060 41.2880 1.4070 41.0230 1.4080 40.7470 1.4090 40.4620 1.4100 40.1710 1.4110 39.8720 1.4120 39.5710 1.4130 39.2650 1.4140 38.9550 1.4150 38.6470 1.4160 38.3360 1.4170 38.0280 1.4180 37.7280 1.4190 37.4300 1.4200 37.1390 1.4210 36.8520 1.4220 36.5660 1.4230 36.2940 1.4240 36.0310 1.4250 35.7780 1.4260 35.5370 1.4270 35.3060 1.4280 35.0830 1.4290 34.8720 1.4300 34.6710 1.4310 34.4810 1.4320 34.3020 1.4330 34.1350 1.4340 33.9810 1.4350 33.8380 1.4360 33.7040 1.4370 33.5850 1.4380 33.4730 1.4390 33.3720 1.4400 33.2830 1.4410 33.2010 1.4420 33.1290 1.4430 33.0650 1.4440 33.0070 1.4450 32.9560 1.4460 32.9120 1.4470 32.8760 1.4480 32.8460 1.4490 32.8170 1.4500 32.7960 1.4510 32.7800 1.4520 32.7670 1.4530 32.7600 1.4540 32.7570 1.4550 32.7580 1.4560 32.7620 1.4570 32.7670 1.4580 32.7740 1.4590 32.7810 1.4600 32.7920 1.4610 32.8070 1.4620 32.8250 1.4630 32.8450 1.4640 32.8660 1.4650 32.8900 1.4660 32.9160 1.4670 32.9420 1.4680 32.9710 1.4690 33.0020 1.4700 33.0330 1.4710 33.0680 1.4720 33.1050 1.4730 33.1430 1.4740 33.1810 1.4750 33.2190 1.4760 33.2580 1.4770 33.2970 1.4780 33.3360 1.4790 33.3760 1.4800 33.4180 1.4810 33.4590 1.4820 33.5000 1.4830 33.5420 1.4840 33.5820 1.4850 33.6230 1.4860 33.6670 1.4870 33.7120 1.4880 33.7560 1.4890 33.8010 1.4900 33.8460 1.4910 33.8910 1.4920 33.9380 1.4930 33.9870 1.4940 34.0360 1.4950 34.0860 1.4960 34.1360 1.4970 34.1860 1.4980 34.2340 1.4990 34.2790 1.5000 34.3270 1.5010 34.3730 
1.5020 34.4220 1.5030 34.4750 1.5040 34.5240 1.5050 34.5750 1.5060 34.6240 1.5070 34.6690 1.5080 34.7170 1.5090 34.7650 1.5100 34.8120 1.5110 34.8590 1.5120 34.9060 1.5130 34.9510 1.5140 34.9960 1.5150 35.0410 1.5160 35.0860 1.5170 35.1300 1.5180 35.1730 1.5190 35.2160 1.5200 35.2550 1.5210 35.2900 1.5220 35.3260 1.5230 35.3610 1.5240 35.3940 1.5250 35.4280 1.5260 35.4590 1.5270 35.4910 1.5280 35.5200 1.5290 35.5480 1.5300 35.5770 1.5310 35.6010 1.5320 35.6230 1.5330 35.6410 1.5340 35.6560 1.5350 35.6710 1.5360 35.6840 1.5370 35.7010 1.5380 35.7160 1.5390 35.7260 1.5400 35.7370 1.5410 35.7460 1.5420 35.7550 1.5430 35.7620 1.5440 35.7700 1.5450 35.7770 1.5460 35.7860 1.5470 35.7960 1.5480 35.8040 1.5490 35.8100 1.5500 35.8140 1.5510 35.8180 1.5520 35.8230 1.5530 35.8270 1.5540 35.8340 1.5550 35.8390 1.5560 35.8410 1.5570 35.8480 1.5580 35.8520 1.5590 35.8540 1.5600 35.8570 1.5610 35.8600 1.5620 35.8640 1.5630 35.8690 1.5640 35.8740 1.5650 35.8820 1.5660 35.8880 1.5670 35.8980 1.5680 35.9100 1.5690 35.9200 1.5700 35.9340 1.5710 35.9490 1.5720 35.9630 1.5730 35.9800 1.5740 35.9940 1.5750 36.0100 1.5760 36.0290 1.5770 36.0510 1.5780 36.0790 1.5790 36.1070 1.5800 36.1360 1.5810 36.1660 1.5820 36.1960 1.5830 36.2300 1.5840 36.2630 1.5850 36.2920 1.5860 36.3250 1.5870 36.3630 1.5880 36.4010 1.5890 36.4390 1.5900 36.4790 1.5910 36.5200 1.5920 36.5630 1.5930 36.6080 1.5940 36.6530 1.5950 36.6960 1.5960 36.7360 1.5970 36.7780 1.5980 36.8200 1.5990 36.8630 1.6000 36.9100 1.6010 36.9590 1.6020 37.0070 1.6030 37.0560 1.6040 37.1010 1.6050 37.1410 1.6060 37.1810 1.6070 37.2220 1.6080 37.2640 1.6090 37.3090 1.6100 37.3550 1.6110 37.3960 1.6120 37.4340 1.6130 37.4740 1.6140 37.5140 1.6150 37.5540 1.6160 37.5920 1.6170 37.6280 1.6180 37.6640 1.6190 37.7000 1.6200 37.7380 1.6210 37.7770 1.6220 37.8130 1.6230 37.8450 1.6240 37.8780 1.6250 37.9080 1.6260 37.9370 1.6270 37.9670 1.6280 37.9990 1.6290 38.0290 1.6300 38.0580 1.6310 38.0870 1.6320 38.1170 1.6330 38.1430 1.6340 38.1660 1.6350 38.1900 1.6360 38.2110 1.6370 38.2330 1.6380 38.2550 1.6390 38.2770 1.6400 38.2990 1.6410 38.3210 1.6420 38.3430 1.6430 38.3650 1.6440 38.3860 1.6450 38.4000 1.6460 38.4190 1.6470 38.4350 1.6480 38.4500 1.6490 38.4630 1.6500 38.4730 1.6510 38.4760 1.6520 38.4740 1.6530 38.4700 1.6540 38.4580 1.6550 38.4420 1.6560 38.4180 1.6570 38.3850 1.6580 38.3430 1.6590 38.2940 1.6600 38.2450 1.6610 38.1930 1.6620 38.1410 1.6630 38.0910 1.6640 38.0410 1.6650 37.9980 1.6660 37.9600 1.6670 37.9270 1.6680 37.8990 1.6690 37.8710 1.6700 37.8450 1.6710 37.8220 1.6720 37.7990 1.6730 37.7730 1.6740 37.7440 1.6750 37.7080 1.6760 37.6710 1.6770 37.6320 1.6780 37.5930 1.6790 37.5520 1.6800 37.5060 1.6810 37.4620 1.6820 37.4180 1.6830 37.3720 1.6840 37.3290 1.6850 37.2870 1.6860 37.2400 1.6870 37.1970 1.6880 37.1560 1.6890 37.1120 1.6900 37.0740 1.6910 37.0390 1.6920 37.0020 1.6930 36.9690 1.6940 36.9280 1.6950 36.8820 1.6960 36.8360 1.6970 36.7840 1.6980 36.7360 1.6990 36.6900 1.7000 36.6430 1.7010 36.5950 1.7020 36.5480 1.7030 36.4990 1.7040 36.4540 1.7050 36.4130 1.7060 36.3710 1.7070 36.3320 1.7080 36.2930 1.7090 36.2540 1.7100 36.2170 1.7110 36.1830 1.7120 36.1460 1.7130 36.1110 1.7140 36.0760 1.7150 36.0420 1.7160 36.0100 1.7170 35.9740 1.7180 35.9380 1.7190 35.9010 1.7200 35.8630 1.7210 35.8300 1.7220 35.7940 1.7230 35.7590 1.7240 35.7290 1.7250 35.6960 1.7260 35.6690 1.7270 35.6480 1.7280 35.6300 1.7290 35.6180 1.7300 35.6110 1.7310 35.6070 1.7320 35.6030 1.7330 35.6070 1.7340 35.6230 1.7350 35.6340 1.7360 35.6530 1.7370 35.6670 1.7380 35.6770 
1.7390 35.6860 1.7400 35.6920 1.7410 35.7000 1.7420 35.7080 1.7430 35.7140 1.7440 35.7240 1.7450 35.7240 1.7460 35.7140 1.7470 35.7080 1.7480 35.6950 1.7490 35.6870 1.7500 35.6820 1.7510 35.6750 1.7520 35.6670 1.7530 35.6580 1.7540 35.6470 1.7550 35.6410 1.7560 35.6320 1.7570 35.6280 1.7580 35.6230 1.7590 35.6180 1.7600 35.6220 1.7610 35.6240 1.7620 35.6290 1.7630 35.6330 1.7640 35.6440 1.7650 35.6610 1.7660 35.6780 1.7670 35.6970 1.7680 35.7170 1.7690 35.7370 1.7700 35.7570 1.7710 35.7790 1.7720 35.8010 1.7730 35.8250 1.7740 35.8460 1.7750 35.8690 1.7760 35.8900 1.7770 35.9090 1.7780 35.9300 1.7790 35.9540 1.7800 35.9750 1.7810 35.9970 1.7820 36.0160 1.7830 36.0300 1.7840 36.0490 1.7850 36.0680 1.7860 36.0860 1.7870 36.1060 1.7880 36.1240 1.7890 36.1430 1.7900 36.1680 1.7910 36.1910 1.7920 36.2130 1.7930 36.2340 1.7940 36.2520 1.7950 36.2740 1.7960 36.2940 1.7970 36.3200 1.7980 36.3520 1.7990 36.3830 1.8000 36.4190 1.8010 36.4470 1.8020 36.4690 1.8030 36.4950 1.8040 36.5160 1.8050 36.5430 1.8060 36.5740 1.8070 36.6070 1.8080 36.6460 1.8090 36.6770 1.8100 36.7070 1.8110 36.7340 1.8120 36.7570 1.8130 36.7910 1.8140 36.8260 1.8150 36.8540 1.8160 36.8810 1.8170 36.9140 1.8180 36.9480 1.8190 36.9940 1.8200 37.0330 1.8210 37.0640 1.8220 37.0990 1.8230 37.1310 1.8240 37.1670 1.8250 37.2080 1.8260 37.2450 1.8270 37.2840 1.8280 37.3190 1.8290 37.3500 1.8300 37.3430 1.8310 37.2580 1.8320 37.2590 1.8330 37.2950 1.8340 37.3340 1.8350 37.3760 1.8360 37.4140 1.8370 37.4520 1.8380 37.4940 1.8390 37.5370 1.8400 37.5870 1.8410 37.6280 1.8420 37.6690 1.8430 37.7060 1.8440 37.7360 1.8450 37.7640 1.8460 37.7890 1.8470 37.8140 1.8480 37.8400 1.8490 37.8610 1.8500 37.8810 1.8510 37.9010 1.8520 37.9180 1.8530 37.9340 1.8540 37.9530 1.8550 37.9660 1.8560 37.9770 1.8570 37.9890 1.8580 37.9920 1.8590 37.9920 1.8600 37.9880 1.8610 37.9880 1.8620 37.9790 1.8630 37.9680 1.8640 37.9610 1.8650 37.9490 1.8660 37.9230 1.8670 37.8910 1.8680 37.8600 1.8690 37.8210 1.8700 37.7810 1.8710 37.7430 1.8720 37.7020 1.8730 37.6360 1.8740 37.5650 1.8750 37.4860 1.8760 37.3970 1.8770 37.3110 1.8780 37.2120 1.8790 37.0980 1.8800 36.9780 1.8810 36.8490 1.8820 36.7120 1.8830 36.5720 1.8840 36.4230 1.8850 36.2580 1.8860 36.0750 1.8870 35.8760 1.8880 35.6620 1.8890 35.4360 1.8900 35.2070 1.8910 34.9680 1.8920 34.7200 1.8930 34.4620 1.8940 34.1960 1.8950 33.9180 1.8960 33.6350 1.8970 33.3520 1.8980 33.0610 1.8990 32.7740 1.9000 32.4820 1.9010 32.1770 1.9020 31.8840 1.9030 31.5910 1.9040 31.3040 1.9050 31.0320 1.9060 30.7660 1.9070 30.5120 1.9080 30.2700 1.9090 30.0300 1.9100 29.8050 1.9110 29.5900 1.9120 29.3850 1.9130 29.1940 1.9140 29.0130 1.9150 28.8460 1.9160 28.6980 1.9170 28.5640 1.9180 28.4430 1.9190 28.3300 1.9200 28.2260 1.9210 28.1300 1.9220 28.0440 1.9230 27.9670 1.9240 27.9010 1.9250 27.8460 1.9260 27.7940 1.9270 27.7550 1.9280 27.7300 1.9290 27.7080 1.9300 27.6960 1.9310 27.6970 1.9320 27.6960 1.9330 27.7040 1.9340 27.7200 1.9350 27.7420 1.9360 27.7670 1.9370 27.8010 1.9380 27.8440 1.9390 27.8940 1.9400 27.9480 1.9410 28.0060 1.9420 28.0640 1.9430 28.1180 1.9440 28.1770 1.9450 28.2330 1.9460 28.2960 1.9470 28.3610 1.9480 28.4320 1.9490 28.5110 1.9500 28.5860 1.9510 28.6600 1.9520 28.7360 1.9530 28.8020 1.9540 28.8670 1.9550 28.9310 1.9560 28.9900 1.9570 29.0570 1.9580 29.1330 1.9590 29.1990 1.9600 29.2650 1.9610 29.3290 1.9620 29.3890 1.9630 29.4460 1.9640 29.5080 1.9650 29.5750 1.9660 29.6310 1.9670 29.6800 1.9680 29.7320 1.9690 29.7800 1.9700 29.8270 1.9710 29.8750 1.9720 29.9230 1.9730 29.9690 1.9740 30.0150 1.9750 30.0640 
1.9760 30.1140 1.9770 30.1610 1.9780 30.2040 1.9790 30.2510 1.9800 30.2990 1.9810 30.3470 1.9820 30.4000 1.9830 30.4480 1.9840 30.4940 1.9850 30.5390 1.9860 30.5840 1.9870 30.6250 1.9880 30.6600 1.9890 30.6970 1.9900 30.7280 1.9910 30.7530 1.9920 30.7800 1.9930 30.8100 1.9940 30.8270 1.9950 30.8400 1.9960 30.8490 1.9970 30.8450 1.9980 30.8460 1.9990 30.8490 2.0000 30.8480 2.0010 30.8330 2.0020 30.8140 2.0030 30.7840 2.0040 30.7420 2.0050 30.7040 2.0060 30.6700 2.0070 30.6210 2.0080 30.5630 2.0090 30.5020 2.0100 30.4340 2.0110 30.3580 2.0120 30.2780 2.0130 30.1930 2.0140 30.1010 2.0150 30.0020 2.0160 29.9030 2.0170 29.8000 2.0180 29.6880 2.0190 29.5660 2.0200 29.4440 2.0210 29.3150 2.0220 29.1880 2.0230 29.0600 2.0240 28.9290 2.0250 28.7940 2.0260 28.6590 2.0270 28.5260 2.0280 28.3830 2.0290 28.2320 2.0300 28.0800 2.0310 27.9290 2.0320 27.7700 2.0330 27.6200 2.0340 27.4810 2.0350 27.3330 2.0360 27.1830 2.0370 27.0350 2.0380 26.8820 2.0390 26.7360 2.0400 26.5930 2.0410 26.4520 2.0420 26.3150 2.0430 26.1810 2.0440 26.0450 2.0450 25.9080 2.0460 25.7780 2.0470 25.6450 2.0480 25.5240 2.0490 25.4120 2.0500 25.3020 2.0510 25.2000 2.0520 25.0990 2.0530 25.0010 2.0540 24.9070 2.0550 24.8190 2.0560 24.7300 2.0570 24.6470 2.0580 24.5620 2.0590 24.4840 2.0600 24.4070 2.0610 24.3390 2.0620 24.2780 2.0630 24.2190 2.0640 24.1650 2.0650 24.1100 2.0660 24.0560 2.0670 24.0060 2.0680 23.9590 2.0690 23.9100 2.0700 23.8630 2.0710 23.8180 2.0720 23.7670 2.0730 23.7300 2.0740 23.6990 2.0750 23.6780 2.0760 23.6520 2.0770 23.6320 2.0780 23.6070 2.0790 23.5750 2.0800 23.5470 2.0810 23.5190 2.0820 23.4930 2.0830 23.4670 2.0840 23.4440 2.0850 23.4240 2.0860 23.4050 2.0870 23.3870 2.0880 23.3740 2.0890 23.3560 2.0900 23.3370 2.0910 23.3220 2.0920 23.3050 2.0930 23.2950 2.0940 23.2920 2.0950 23.2840 2.0960 23.2700 2.0970 23.2640 2.0980 23.2590 2.0990 23.2580 2.1000 23.2640 2.1010 23.2700 2.1020 23.2790 2.1030 23.2880 2.1040 23.2970 2.1050 23.3130 2.1060 23.3270 2.1070 23.3390 2.1080 23.3550 2.1090 23.3760 2.1100 23.3920 2.1110 23.4040 2.1120 23.4190 2.1130 23.4310 2.1140 23.4470 2.1150 23.4710 2.1160 23.4900 2.1170 23.5100 2.1180 23.5310 2.1190 23.5490 2.1200 23.5720 2.1210 23.5990 2.1220 23.6230 2.1230 23.6450 2.1240 23.6680 2.1250 23.6870 2.1260 23.7000 2.1270 23.7160 2.1280 23.7340 2.1290 23.7480 2.1300 23.7610 2.1310 23.7700 2.1320 23.7720 2.1330 23.7740 2.1340 23.7720 2.1350 23.7700 2.1360 23.7670 2.1370 23.7650 2.1380 23.7610 2.1390 23.7610 2.1400 23.7590 2.1410 23.7680 2.1420 23.7810 2.1430 23.8000 2.1440 23.8210 2.1450 23.8470 2.1460 23.8770 2.1470 23.9130 2.1480 23.9470 2.1490 23.9890 2.1500 24.0270 2.1510 24.0610 2.1520 24.0970 2.1530 24.1400 2.1540 24.1700 2.1550 24.1940 2.1560 24.2140 2.1570 24.2250 2.1580 24.2350 2.1590 24.2390 2.1600 24.2450 2.1610 24.2500 2.1620 24.2560 2.1630 24.2680 2.1640 24.2900 2.1650 24.3030 2.1660 24.3110 2.1670 24.3300 2.1680 24.3440 2.1690 24.3770 2.1700 24.4280 2.1710 24.4750 2.1720 24.5350 2.1730 24.5870 2.1740 24.6370 2.1750 24.6850 2.1760 24.7330 2.1770 24.7750 2.1780 24.8120 2.1790 24.8470 2.1800 24.8730 2.1810 24.9060 2.1820 24.9450 2.1830 24.9830 2.1840 25.0120 2.1850 25.0400 2.1860 25.0660 2.1870 25.0830 2.1880 25.1020 2.1890 25.1210 2.1900 25.1440 2.1910 25.1610 2.1920 25.1880 2.1930 25.2290 2.1940 25.2600 2.1950 25.2860 2.1960 25.3180 2.1970 25.3460 2.1980 25.3810 2.1990 25.4280 2.2000 25.4670 2.2010 25.5090 2.2020 25.5440 2.2030 25.5730 2.2040 25.6000 2.2050 25.6240 2.2060 25.6440 2.2070 25.6620 2.2080 25.6800 2.2090 25.6890 2.2100 25.6900 2.2110 25.6950 2.2120 25.6930 
2.2130 25.6980 2.2140 25.7060 2.2150 25.7120 2.2160 25.7180 2.2170 25.7190 2.2180 25.7160 2.2190 25.7130 2.2200 25.7110 2.2210 25.7000 2.2220 25.6870 2.2230 25.6670 2.2240 25.6380 2.2250 25.6070 2.2260 25.5740 2.2270 25.5350 2.2280 25.4930 2.2290 25.4400 2.2300 25.3770 2.2310 25.3080 2.2320 25.2310 2.2330 25.1490 2.2340 25.0620 2.2350 24.9680 2.2360 24.8800 2.2370 24.7880 2.2380 24.6970 2.2390 24.6070 2.2400 24.5220 2.2410 24.4390 2.2420 24.3550 2.2430 24.2810 2.2440 24.1960 2.2450 24.1030 2.2460 24.0150 2.2470 23.9230 2.2480 23.8390 2.2490 23.7660 2.2500 23.6830 2.2510 23.5970 2.2520 23.5150 2.2530 23.4250 2.2540 23.3450 2.2550 23.2860 2.2560 23.2070 2.2570 23.1220 2.2580 23.0330 2.2590 22.9240 2.2600 22.8260 2.2610 22.7310 2.2620 22.6380 2.2630 22.5490 2.2640 22.4640 2.2650 22.3880 2.2660 22.3010 2.2670 22.2160 2.2680 22.1280 2.2690 22.0540 2.2700 21.9770 2.2710 21.9160 2.2720 21.8730 2.2730 21.8170 2.2740 21.7610 2.2750 21.7080 2.2760 21.6540 2.2770 21.6030 2.2780 21.5540 2.2790 21.5110 2.2800 21.4660 2.2810 21.4350 2.2820 21.4230 2.2830 21.3950 2.2840 21.3630 2.2850 21.3360 2.2860 21.3050 2.2870 21.2740 2.2880 21.2480 2.2890 21.2210 2.2900 21.1880 2.2910 21.1600 2.2920 21.1400 2.2930 21.1120 2.2940 21.0790 2.2950 21.0440 2.2960 21.0000 2.2970 20.9620 2.2980 20.9260 2.2990 20.8870 2.3000 20.8460 2.3010 20.8060 2.3020 20.7650 2.3030 20.7160 2.3040 20.6680 2.3050 20.6270 2.3060 20.5860 2.3070 20.5530 2.3080 20.5310 2.3090 20.5160 2.3100 20.5050 2.3110 20.4910 2.3120 20.4780 2.3130 20.4630 2.3140 20.4610 2.3150 20.4640 2.3160 20.4730 2.3170 20.4980 2.3180 20.5100 2.3190 20.5210 2.3200 20.5280 2.3210 20.5260 2.3220 20.5310 2.3230 20.5450 2.3240 20.5430 2.3250 20.5300 2.3260 20.5290 2.3270 20.5150 2.3280 20.5170 2.3290 20.5410 2.3300 20.5530 2.3310 20.5630 2.3320 20.5700 2.3330 20.5770 2.3340 20.5770 2.3350 20.5640 2.3360 20.5660 2.3370 20.5730 2.3380 20.5740 2.3390 20.5840 2.3400 20.5850 2.3410 20.5690 2.3420 20.5640 2.3430 20.5590 2.3440 20.5600 2.3450 20.5690 2.3460 20.5730 2.3470 20.5800 2.3480 20.5950 2.3490 20.6100 2.3500 20.6270 2.3510 20.6510 2.3520 20.6730 2.3530 20.6960 2.3540 20.7120 2.3550 20.7360 2.3560 20.7620 2.3570 20.7810 2.3580 20.8040 2.3590 20.8350 2.3600 20.8580 2.3610 20.8790 2.3620 20.9040 2.3630 20.9230 2.3640 20.9320 2.3650 20.9350 2.3660 20.9470 2.3670 20.9620 2.3680 20.9790 2.3690 21.0030 2.3700 21.0280 2.3710 21.0470 2.3720 21.0560 2.3730 21.0780 2.3740 21.1110 2.3750 21.1380 2.3760 21.1760 2.3770 21.2130 2.3780 21.2380 2.3790 21.2780 2.3800 21.3130 2.3810 21.3430 2.3820 21.3590 2.3830 21.3490 2.3840 21.3340 2.3850 21.3160 2.3860 21.3100 2.3870 21.3100 2.3880 21.3150 2.3890 21.3330 2.3900 21.3340 2.3910 21.3160 2.3920 21.3170 2.3930 21.3170 2.3940 21.3120 2.3950 21.3140 2.3960 21.3010 2.3970 21.2840 2.3980 21.2600 2.3990 21.2380 2.4000 21.2280 2.4010 21.1960 2.4020 21.1750 2.4030 21.1610 2.4040 21.1430 2.4050 21.1460 2.4060 21.1380 2.4070 21.1020 2.4080 21.0450 2.4090 20.9890 2.4100 20.9380 2.4110 20.9060 2.4120 20.8950 2.4130 20.8600 2.4140 20.7900 2.4150 20.7160 2.4160 20.6500 2.4170 20.5780 2.4180 20.5150 2.4190 20.4840 2.4200 20.4370 2.4210 20.3700 2.4220 20.3150 2.4230 20.2530 2.4240 20.1830 2.4250 20.1220 2.4260 20.0370 2.4270 19.9530 2.4280 19.8680 2.4290 19.7850 2.4300 19.7380 2.4310 19.6590 2.4320 19.5570 2.4330 19.4710 2.4340 19.3720 2.4350 19.2900 2.4360 19.2240 2.4370 19.1220 2.4380 19.0240 2.4390 18.9560 2.4400 18.8420 2.4410 18.7450 2.4420 18.6980 2.4430 18.5960 2.4440 18.4980 2.4450 18.4190 2.4460 18.2990 2.4470 18.2040 2.4480 18.1220 2.4490 18.0140 
2.4500 17.8940 2.4510 17.7620 2.4520 17.6290 2.4530 17.4970 2.4540 17.3850 2.4550 17.2530 2.4560 17.1240 2.4570 17.0530 2.4580 16.9580 2.4590 16.8720 2.4600 16.7980 2.4610 16.6940 2.4620 16.6520 2.4630 16.6320 2.4640 16.5760 2.4650 16.5360 2.4660 16.4570 2.4670 16.3660 2.4680 16.3570 2.4690 16.3300 2.4700 16.3290 2.4710 16.3690 2.4720 16.3650 2.4730 16.3720 2.4740 16.3700 2.4750 16.3260 2.4760 16.3100 2.4770 16.3300 2.4780 16.3190 2.4790 16.2870 2.4800 16.2730 2.4810 16.2640 2.4820 16.1840 2.4830 16.0870 2.4840 15.9880 2.4850 15.9420 2.4860 15.9970 2.4870 16.0930 2.4880 16.1000 2.4890 16.0830 2.4900 16.0760 2.4910 16.0420 2.4920 16.1050 2.4930 16.1710 2.4940 16.1900 2.4950 16.2230 2.4960 16.1440 2.4970 16.0610 2.4980 16.0030 2.4990 15.9130 2.5000 15.9450 spectral-0.22.4/spectral/tests/data/ecostress/c.spectrum.txt000066400000000000000000001111101412674721200241430ustar00rootroot00000000000000Name: Acer rubrum Type: Vegetation Class: Tree Genus: Acer Species: rubrum Sample No.: ACRU-1-13 Owner: UCSB Wavelength Range: VSWIR Origin: USA; Massachusetts; Harvard Forest Collection Date: 7/8/2013 Description: Samples were collected as part of NSF Macrosystem Biology proposal titled: Collaborative Research: Thermal controls on ecosystem metabolism and function: scaling from leaves to canopies to regions. Samples were collected and overnighted to JPL facilities for processing. The same leaves were processed in the Nicolet and then measured using the ASD. Measurement: Bidirectional reflectance First Column: X Second Column: Y X Units: Wavelength (micrometers) Y Units: Reflectance (percentage) First X Value: 0.35 Last X Value: 2.5 Number of X Values: 2151 Additional Information: vegetation.tree.acer.rubrum.vswir.acru-1-13.ucsb.asd.ancillary.txt 0.3500 10.9880 0.3510 10.6410 0.3520 10.3770 0.3530 10.4190 0.3540 10.7070 0.3550 10.7170 0.3560 10.5650 0.3570 10.4340 0.3580 10.4910 0.3590 10.4400 0.3600 10.3190 0.3610 10.2300 0.3620 10.3220 0.3630 10.2930 0.3640 10.2760 0.3650 10.4020 0.3660 10.3680 0.3670 10.3030 0.3680 10.2850 0.3690 10.3380 0.3700 10.3590 0.3710 10.3410 0.3720 10.3070 0.3730 10.3010 0.3740 10.3140 0.3750 10.2640 0.3760 10.1230 0.3770 10.2310 0.3780 10.3190 0.3790 10.2730 0.3800 10.1490 0.3810 10.1710 0.3820 10.2050 0.3830 10.1620 0.3840 10.1940 0.3850 10.2180 0.3860 10.2180 0.3870 10.2060 0.3880 10.1980 0.3890 10.1910 0.3900 10.1680 0.3910 10.1000 0.3920 10.0790 0.3930 10.0810 0.3940 10.0830 0.3950 10.1490 0.3960 10.1160 0.3970 10.0470 0.3980 10.0820 0.3990 10.0660 0.4000 10.0690 0.4010 10.0960 0.4020 10.0390 0.4030 9.9710 0.4040 9.9610 0.4050 10.0220 0.4060 9.9570 0.4070 9.9520 0.4080 10.0010 0.4090 9.9750 0.4100 9.9950 0.4110 9.9940 0.4120 9.9540 0.4130 9.9910 0.4140 10.0060 0.4150 10.0200 0.4160 10.0660 0.4170 10.0390 0.4180 10.0150 0.4190 10.0280 0.4200 10.0670 0.4210 10.0590 0.4220 10.0400 0.4230 10.0460 0.4240 10.0750 0.4250 10.0960 0.4260 10.0970 0.4270 10.0830 0.4280 10.0870 0.4290 10.0860 0.4300 10.0740 0.4310 10.0790 0.4320 10.0500 0.4330 10.0040 0.4340 9.9710 0.4350 9.9540 0.4360 9.9600 0.4370 9.9890 0.4380 10.0330 0.4390 10.0300 0.4400 10.0360 0.4410 10.0780 0.4420 10.0840 0.4430 10.0790 0.4440 10.0720 0.4450 10.0680 0.4460 10.0710 0.4470 10.0860 0.4480 10.1030 0.4490 10.0990 0.4500 10.0900 0.4510 10.0840 0.4520 10.0880 0.4530 10.1070 0.4540 10.1050 0.4550 10.0960 0.4560 10.1150 0.4570 10.1100 0.4580 10.0960 0.4590 10.0900 0.4600 10.0960 0.4610 10.1070 0.4620 10.1150 0.4630 10.1110 0.4640 10.1130 0.4650 10.1150 0.4660 10.1100 0.4670 10.0940 0.4680 10.0920 0.4690 10.0960 
0.4700 10.0980 0.4710 10.1090 0.4720 10.1030 0.4730 10.0880 0.4740 10.0910 0.4750 10.0860 0.4760 10.0840 0.4770 10.0880 0.4780 10.0860 0.4790 10.0750 0.4800 10.0670 0.4810 10.0700 0.4820 10.0690 0.4830 10.0720 0.4840 10.0740 0.4850 10.0610 0.4860 10.0540 0.4870 10.0600 0.4880 10.0770 0.4890 10.0760 0.4900 10.0800 0.4910 10.0880 0.4920 10.0890 0.4930 10.0980 0.4940 10.1180 0.4950 10.1390 0.4960 10.1410 0.4970 10.1550 0.4980 10.1720 0.4990 10.1800 0.5000 10.1960 0.5010 10.2210 0.5020 10.2480 0.5030 10.2660 0.5040 10.2850 0.5050 10.3160 0.5060 10.3570 0.5070 10.3850 0.5080 10.4300 0.5090 10.4860 0.5100 10.5330 0.5110 10.5980 0.5120 10.6730 0.5130 10.7510 0.5140 10.8330 0.5150 10.9310 0.5160 11.0460 0.5170 11.1720 0.5180 11.2990 0.5190 11.4390 0.5200 11.5940 0.5210 11.7550 0.5220 11.9100 0.5230 12.0710 0.5240 12.2450 0.5250 12.4100 0.5260 12.5740 0.5270 12.7350 0.5280 12.8830 0.5290 13.0210 0.5300 13.1540 0.5310 13.2820 0.5320 13.3980 0.5330 13.5000 0.5340 13.5930 0.5350 13.6780 0.5360 13.7400 0.5370 13.8000 0.5380 13.8610 0.5390 13.9110 0.5400 13.9450 0.5410 13.9680 0.5420 13.9860 0.5430 14.0180 0.5440 14.0430 0.5450 14.0670 0.5460 14.1090 0.5470 14.1730 0.5480 14.2340 0.5490 14.2770 0.5500 14.3020 0.5510 14.3160 0.5520 14.3200 0.5530 14.3120 0.5540 14.3020 0.5550 14.2780 0.5560 14.2370 0.5570 14.1790 0.5580 14.1090 0.5590 14.0300 0.5600 13.9480 0.5610 13.8610 0.5620 13.7730 0.5630 13.6780 0.5640 13.5710 0.5650 13.4550 0.5660 13.3340 0.5670 13.2100 0.5680 13.0860 0.5690 12.9580 0.5700 12.8250 0.5710 12.6890 0.5720 12.5710 0.5730 12.4610 0.5740 12.3550 0.5750 12.2540 0.5760 12.1610 0.5770 12.0800 0.5780 12.0110 0.5790 11.9400 0.5800 11.8790 0.5810 11.8230 0.5820 11.7660 0.5830 11.7120 0.5840 11.6540 0.5850 11.5970 0.5860 11.5540 0.5870 11.5150 0.5880 11.4830 0.5890 11.4620 0.5900 11.4470 0.5910 11.4280 0.5920 11.4050 0.5930 11.3890 0.5940 11.3770 0.5950 11.3660 0.5960 11.3520 0.5970 11.3310 0.5980 11.3090 0.5990 11.2880 0.6000 11.2650 0.6010 11.2410 0.6020 11.2150 0.6030 11.1880 0.6040 11.1680 0.6050 11.1430 0.6060 11.1150 0.6070 11.0860 0.6080 11.0500 0.6090 10.9990 0.6100 10.9330 0.6110 10.8610 0.6120 10.8290 0.6130 10.8150 0.6140 10.8010 0.6150 10.7790 0.6160 10.7530 0.6170 10.7250 0.6180 10.6980 0.6190 10.6720 0.6200 10.6510 0.6210 10.6330 0.6220 10.6150 0.6230 10.6010 0.6240 10.5880 0.6250 10.5770 0.6260 10.5690 0.6270 10.5620 0.6280 10.5560 0.6290 10.5500 0.6300 10.5360 0.6310 10.5290 0.6320 10.5290 0.6330 10.5230 0.6340 10.5110 0.6350 10.4960 0.6360 10.4770 0.6370 10.4550 0.6380 10.4340 0.6390 10.4120 0.6400 10.3820 0.6410 10.3460 0.6420 10.3170 0.6430 10.2980 0.6440 10.2770 0.6450 10.2510 0.6460 10.2250 0.6470 10.2020 0.6480 10.1790 0.6490 10.1610 0.6500 10.1480 0.6510 10.1380 0.6520 10.1270 0.6530 10.1160 0.6540 10.1050 0.6550 10.1020 0.6560 10.0950 0.6570 10.0830 0.6580 10.0710 0.6590 10.0650 0.6600 10.0520 0.6610 10.0320 0.6620 10.0220 0.6630 10.0170 0.6640 10.0130 0.6650 10.0070 0.6660 10.0000 0.6670 9.9950 0.6680 9.9950 0.6690 10.0000 0.6700 10.0040 0.6710 10.0080 0.6720 10.0140 0.6730 10.0250 0.6740 10.0340 0.6750 10.0440 0.6760 10.0580 0.6770 10.0710 0.6780 10.0860 0.6790 10.1030 0.6800 10.1230 0.6810 10.1430 0.6820 10.1630 0.6830 10.1870 0.6840 10.2200 0.6850 10.2480 0.6860 10.2770 0.6870 10.3150 0.6880 10.3650 0.6890 10.4270 0.6900 10.5050 0.6910 10.6030 0.6920 10.7290 0.6930 10.8960 0.6940 11.1150 0.6950 11.3830 0.6960 11.7110 0.6970 12.1070 0.6980 12.5750 0.6990 13.1060 0.7000 13.7040 0.7010 14.3620 0.7020 15.0660 0.7030 15.8220 0.7040 16.6160 0.7050 17.4360 0.7060 18.2790 
0.7070 19.1280 0.7080 19.9910 0.7090 20.8820 0.7100 21.7760 0.7110 22.6780 0.7120 23.5860 0.7130 24.4910 0.7140 25.4030 0.7150 26.3190 0.7160 27.2310 0.7170 28.1440 0.7180 29.0510 0.7190 29.9530 0.7200 30.8560 0.7210 31.7490 0.7220 32.6290 0.7230 33.4960 0.7240 34.3470 0.7250 35.1830 0.7260 35.9980 0.7270 36.7900 0.7280 37.5560 0.7290 38.3000 0.7300 39.0160 0.7310 39.6990 0.7320 40.3630 0.7330 41.0090 0.7340 41.6250 0.7350 42.1920 0.7360 42.7370 0.7370 43.2710 0.7380 43.7880 0.7390 44.2520 0.7400 44.6830 0.7410 45.0960 0.7420 45.4980 0.7430 45.8570 0.7440 46.1920 0.7450 46.5080 0.7460 46.7910 0.7470 47.0710 0.7480 47.3360 0.7490 47.5680 0.7500 47.7640 0.7510 47.9660 0.7520 48.1560 0.7530 48.3030 0.7540 48.4460 0.7550 48.5870 0.7560 48.7160 0.7570 48.8160 0.7580 48.9220 0.7590 49.0160 0.7600 49.0870 0.7610 49.1450 0.7620 49.2140 0.7630 49.2790 0.7640 49.3260 0.7650 49.3950 0.7660 49.4450 0.7670 49.4720 0.7680 49.4940 0.7690 49.5230 0.7700 49.5570 0.7710 49.5840 0.7720 49.5810 0.7730 49.6090 0.7740 49.6470 0.7750 49.6660 0.7760 49.6720 0.7770 49.6930 0.7780 49.7130 0.7790 49.7090 0.7800 49.7310 0.7810 49.7460 0.7820 49.7430 0.7830 49.7440 0.7840 49.7610 0.7850 49.7760 0.7860 49.7770 0.7870 49.7760 0.7880 49.7860 0.7890 49.8040 0.7900 49.8180 0.7910 49.8090 0.7920 49.7980 0.7930 49.7980 0.7940 49.8080 0.7950 49.8160 0.7960 49.8150 0.7970 49.8100 0.7980 49.8120 0.7990 49.8180 0.8000 49.8210 0.8010 49.8190 0.8020 49.8120 0.8030 49.8010 0.8040 49.7960 0.8050 49.8040 0.8060 49.7850 0.8070 49.7760 0.8080 49.7790 0.8090 49.7690 0.8100 49.7560 0.8110 49.7530 0.8120 49.7570 0.8130 49.7480 0.8140 49.7390 0.8150 49.7250 0.8160 49.7080 0.8170 49.7100 0.8180 49.7050 0.8190 49.6930 0.8200 49.6830 0.8210 49.6750 0.8220 49.6830 0.8230 49.6910 0.8240 49.6610 0.8250 49.6510 0.8260 49.6410 0.8270 49.6230 0.8280 49.6240 0.8290 49.6210 0.8300 49.6130 0.8310 49.6030 0.8320 49.5850 0.8330 49.5830 0.8340 49.5910 0.8350 49.5890 0.8360 49.5860 0.8370 49.5640 0.8380 49.5410 0.8390 49.5550 0.8400 49.5670 0.8410 49.5610 0.8420 49.5450 0.8430 49.5520 0.8440 49.5610 0.8450 49.5600 0.8460 49.5510 0.8470 49.5540 0.8480 49.5600 0.8490 49.5620 0.8500 49.5550 0.8510 49.5370 0.8520 49.5480 0.8530 49.5710 0.8540 49.5640 0.8550 49.5590 0.8560 49.5690 0.8570 49.5770 0.8580 49.5480 0.8590 49.5410 0.8600 49.5380 0.8610 49.5360 0.8620 49.5710 0.8630 49.5720 0.8640 49.5670 0.8650 49.5840 0.8660 49.5830 0.8670 49.5830 0.8680 49.5880 0.8690 49.5950 0.8700 49.6070 0.8710 49.6080 0.8720 49.6020 0.8730 49.6020 0.8740 49.6070 0.8750 49.6060 0.8760 49.6020 0.8770 49.6120 0.8780 49.6230 0.8790 49.6230 0.8800 49.6130 0.8810 49.6100 0.8820 49.6260 0.8830 49.6440 0.8840 49.6500 0.8850 49.6460 0.8860 49.6510 0.8870 49.6580 0.8880 49.6550 0.8890 49.6430 0.8900 49.6390 0.8910 49.6470 0.8920 49.6650 0.8930 49.6680 0.8940 49.6570 0.8950 49.6490 0.8960 49.6640 0.8970 49.6730 0.8980 49.6890 0.8990 49.7000 0.9000 49.6720 0.9010 49.6680 0.9020 49.6840 0.9030 49.6990 0.9040 49.6890 0.9050 49.6970 0.9060 49.7040 0.9070 49.6970 0.9080 49.6990 0.9090 49.7150 0.9100 49.7140 0.9110 49.6870 0.9120 49.6980 0.9130 49.7270 0.9140 49.7390 0.9150 49.7170 0.9160 49.7370 0.9170 49.7190 0.9180 49.6930 0.9190 49.7180 0.9200 49.7470 0.9210 49.7490 0.9220 49.7320 0.9230 49.7200 0.9240 49.7180 0.9250 49.6990 0.9260 49.6720 0.9270 49.6650 0.9280 49.6680 0.9290 49.6680 0.9300 49.6570 0.9310 49.6380 0.9320 49.6300 0.9330 49.6210 0.9340 49.6150 0.9350 49.6310 0.9360 49.6080 0.9370 49.5780 0.9380 49.5620 0.9390 49.5460 0.9400 49.5220 0.9410 49.5040 0.9420 49.4990 0.9430 49.4840 
0.9440 49.4710 0.9450 49.4530 0.9460 49.4240 0.9470 49.4010 0.9480 49.3480 0.9490 49.3070 0.9500 49.2940 0.9510 49.2210 0.9520 49.1830 0.9530 49.1650 0.9540 49.1350 0.9550 49.1070 0.9560 49.0640 0.9570 49.0050 0.9580 48.9410 0.9590 48.9220 0.9600 48.9000 0.9610 48.8820 0.9620 48.8790 0.9630 48.8490 0.9640 48.7970 0.9650 48.7570 0.9660 48.7540 0.9670 48.7250 0.9680 48.7160 0.9690 48.7310 0.9700 48.7430 0.9710 48.6910 0.9720 48.6650 0.9730 48.6710 0.9740 48.6780 0.9750 48.6850 0.9760 48.6710 0.9770 48.6580 0.9780 48.6700 0.9790 48.6540 0.9800 48.6390 0.9810 48.6200 0.9820 48.5910 0.9830 48.6170 0.9840 48.6390 0.9850 48.6210 0.9860 48.5710 0.9870 48.5990 0.9880 48.6410 0.9890 48.6740 0.9900 48.6900 0.9910 48.6590 0.9920 48.6950 0.9930 48.7320 0.9940 48.7690 0.9950 48.8060 0.9960 48.8430 0.9970 48.8790 0.9980 48.9160 0.9990 48.9530 1.0000 48.9900 1.0010 49.0270 1.0020 49.0630 1.0030 49.1000 1.0040 49.1370 1.0050 49.1740 1.0060 49.2110 1.0070 49.2470 1.0080 49.2840 1.0090 49.3210 1.0100 49.3210 1.0110 49.3250 1.0120 49.3350 1.0130 49.3610 1.0140 49.3730 1.0150 49.3820 1.0160 49.3780 1.0170 49.3810 1.0180 49.3830 1.0190 49.3910 1.0200 49.4030 1.0210 49.4050 1.0220 49.4110 1.0230 49.4190 1.0240 49.4310 1.0250 49.4350 1.0260 49.4370 1.0270 49.4410 1.0280 49.4380 1.0290 49.4420 1.0300 49.4530 1.0310 49.4600 1.0320 49.4660 1.0330 49.4690 1.0340 49.4840 1.0350 49.5040 1.0360 49.5120 1.0370 49.5170 1.0380 49.5200 1.0390 49.5090 1.0400 49.5110 1.0410 49.5240 1.0420 49.5270 1.0430 49.5260 1.0440 49.5290 1.0450 49.5320 1.0460 49.5360 1.0470 49.5450 1.0480 49.5480 1.0490 49.5450 1.0500 49.5450 1.0510 49.5410 1.0520 49.5470 1.0530 49.5460 1.0540 49.5460 1.0550 49.5460 1.0560 49.5440 1.0570 49.5410 1.0580 49.5340 1.0590 49.5350 1.0600 49.5360 1.0610 49.5410 1.0620 49.5610 1.0630 49.5670 1.0640 49.5680 1.0650 49.5670 1.0660 49.5570 1.0670 49.5510 1.0680 49.5460 1.0690 49.5400 1.0700 49.5410 1.0710 49.5440 1.0720 49.5530 1.0730 49.5730 1.0740 49.5780 1.0750 49.5780 1.0760 49.5790 1.0770 49.5740 1.0780 49.5690 1.0790 49.5690 1.0800 49.5630 1.0810 49.5470 1.0820 49.5370 1.0830 49.5290 1.0840 49.5230 1.0850 49.5190 1.0860 49.5190 1.0870 49.5190 1.0880 49.5210 1.0890 49.5170 1.0900 49.5120 1.0910 49.5050 1.0920 49.4940 1.0930 49.4830 1.0940 49.4720 1.0950 49.4590 1.0960 49.4390 1.0970 49.4260 1.0980 49.4170 1.0990 49.4040 1.1000 49.3950 1.1010 49.3810 1.1020 49.3650 1.1030 49.3440 1.1040 49.3240 1.1050 49.3110 1.1060 49.2990 1.1070 49.2920 1.1080 49.2770 1.1090 49.2610 1.1100 49.2410 1.1110 49.2090 1.1120 49.1870 1.1130 49.1700 1.1140 49.1480 1.1150 49.1280 1.1160 49.1070 1.1170 49.0850 1.1180 49.0590 1.1190 49.0330 1.1200 49.0070 1.1210 48.9780 1.1220 48.9510 1.1230 48.9190 1.1240 48.8820 1.1250 48.8460 1.1260 48.8050 1.1270 48.7650 1.1280 48.7290 1.1290 48.6870 1.1300 48.6440 1.1310 48.5950 1.1320 48.5420 1.1330 48.4840 1.1340 48.4200 1.1350 48.3550 1.1360 48.2860 1.1370 48.2200 1.1380 48.1410 1.1390 48.0570 1.1400 47.9720 1.1410 47.8810 1.1420 47.7970 1.1430 47.7150 1.1440 47.6290 1.1450 47.5410 1.1460 47.4530 1.1470 47.3620 1.1480 47.2750 1.1490 47.1890 1.1500 47.1070 1.1510 47.0270 1.1520 46.9530 1.1530 46.8860 1.1540 46.8210 1.1550 46.7580 1.1560 46.7030 1.1570 46.6550 1.1580 46.6090 1.1590 46.5730 1.1600 46.5430 1.1610 46.5180 1.1620 46.4960 1.1630 46.4770 1.1640 46.4600 1.1650 46.4440 1.1660 46.4280 1.1670 46.4140 1.1680 46.4040 1.1690 46.3950 1.1700 46.3860 1.1710 46.3780 1.1720 46.3730 1.1730 46.3660 1.1740 46.3580 1.1750 46.3490 1.1760 46.3430 1.1770 46.3350 1.1780 46.3250 1.1790 46.3230 1.1800 46.3170 
1.1810 46.3080 1.1820 46.3030 1.1830 46.2900 1.1840 46.2740 1.1850 46.2670 1.1860 46.2580 1.1870 46.2500 1.1880 46.2410 1.1890 46.2330 1.1900 46.2260 1.1910 46.2190 1.1920 46.2130 1.1930 46.2140 1.1940 46.2090 1.1950 46.2010 1.1960 46.1940 1.1970 46.1870 1.1980 46.1780 1.1990 46.1730 1.2000 46.1730 1.2010 46.1710 1.2020 46.1770 1.2030 46.1830 1.2040 46.1870 1.2050 46.1940 1.2060 46.1930 1.2070 46.1920 1.2080 46.1940 1.2090 46.2000 1.2100 46.2090 1.2110 46.2160 1.2120 46.2230 1.2130 46.2320 1.2140 46.2390 1.2150 46.2480 1.2160 46.2600 1.2170 46.2720 1.2180 46.2790 1.2190 46.2890 1.2200 46.3000 1.2210 46.3120 1.2220 46.3250 1.2230 46.3370 1.2240 46.3490 1.2250 46.3630 1.2260 46.3730 1.2270 46.3830 1.2280 46.3940 1.2290 46.4020 1.2300 46.4100 1.2310 46.4210 1.2320 46.4300 1.2330 46.4400 1.2340 46.4480 1.2350 46.4540 1.2360 46.4620 1.2370 46.4690 1.2380 46.4750 1.2390 46.4810 1.2400 46.4880 1.2410 46.4920 1.2420 46.4990 1.2430 46.5110 1.2440 46.5220 1.2450 46.5320 1.2460 46.5370 1.2470 46.5390 1.2480 46.5360 1.2490 46.5390 1.2500 46.5460 1.2510 46.5460 1.2520 46.5480 1.2530 46.5470 1.2540 46.5420 1.2550 46.5380 1.2560 46.5320 1.2570 46.5310 1.2580 46.5330 1.2590 46.5340 1.2600 46.5420 1.2610 46.5420 1.2620 46.5360 1.2630 46.5270 1.2640 46.5210 1.2650 46.5130 1.2660 46.5070 1.2670 46.5050 1.2680 46.5000 1.2690 46.4930 1.2700 46.4840 1.2710 46.4750 1.2720 46.4680 1.2730 46.4580 1.2740 46.4480 1.2750 46.4370 1.2760 46.4160 1.2770 46.3940 1.2780 46.3760 1.2790 46.3590 1.2800 46.3410 1.2810 46.3230 1.2820 46.3070 1.2830 46.2970 1.2840 46.2870 1.2850 46.2790 1.2860 46.2720 1.2870 46.2610 1.2880 46.2430 1.2890 46.2270 1.2900 46.2120 1.2910 46.1910 1.2920 46.1680 1.2930 46.1450 1.2940 46.1190 1.2950 46.0980 1.2960 46.0750 1.2970 46.0470 1.2980 46.0180 1.2990 45.9830 1.3000 45.9460 1.3010 45.9130 1.3020 45.8790 1.3030 45.8460 1.3040 45.8140 1.3050 45.7780 1.3060 45.7360 1.3070 45.6860 1.3080 45.6370 1.3090 45.5880 1.3100 45.5360 1.3110 45.4870 1.3120 45.4330 1.3130 45.3740 1.3140 45.3130 1.3150 45.2510 1.3160 45.1840 1.3170 45.1140 1.3180 45.0400 1.3190 44.9630 1.3200 44.8850 1.3210 44.8020 1.3220 44.7200 1.3230 44.6360 1.3240 44.5510 1.3250 44.4660 1.3260 44.3760 1.3270 44.2850 1.3280 44.1910 1.3290 44.0910 1.3300 43.9920 1.3310 43.8910 1.3320 43.7890 1.3330 43.6890 1.3340 43.5890 1.3350 43.4860 1.3360 43.3790 1.3370 43.2750 1.3380 43.1660 1.3390 43.0550 1.3400 42.9460 1.3410 42.8390 1.3420 42.7350 1.3430 42.6330 1.3440 42.5320 1.3450 42.4290 1.3460 42.3240 1.3470 42.2220 1.3480 42.1210 1.3490 42.0230 1.3500 41.9280 1.3510 41.8350 1.3520 41.7420 1.3530 41.6500 1.3540 41.5580 1.3550 41.4650 1.3560 41.3740 1.3570 41.2800 1.3580 41.1830 1.3590 41.0840 1.3600 40.9800 1.3610 40.8740 1.3620 40.7650 1.3630 40.6480 1.3640 40.5250 1.3650 40.3940 1.3660 40.2560 1.3670 40.1150 1.3680 39.9660 1.3690 39.8070 1.3700 39.6390 1.3710 39.4600 1.3720 39.2710 1.3730 39.0720 1.3740 38.8620 1.3750 38.6360 1.3760 38.3960 1.3770 38.1360 1.3780 37.8530 1.3790 37.5560 1.3800 37.2380 1.3810 36.8980 1.3820 36.5400 1.3830 36.1650 1.3840 35.7690 1.3850 35.3550 1.3860 34.9250 1.3870 34.4760 1.3880 34.0200 1.3890 33.5500 1.3900 33.0650 1.3910 32.5780 1.3920 32.0820 1.3930 31.5740 1.3940 31.0640 1.3950 30.5490 1.3960 30.0350 1.3970 29.5300 1.3980 29.0310 1.3990 28.5450 1.4000 28.0670 1.4010 27.5990 1.4020 27.1470 1.4030 26.7080 1.4040 26.2860 1.4050 25.8820 1.4060 25.4960 1.4070 25.1340 1.4080 24.7910 1.4090 24.4680 1.4100 24.1670 1.4110 23.8820 1.4120 23.6170 1.4130 23.3730 1.4140 23.1470 1.4150 22.9430 1.4160 22.7560 1.4170 22.5840 
1.4180 22.4280 1.4190 22.2850 1.4200 22.1560 1.4210 22.0410 1.4220 21.9360 1.4230 21.8420 1.4240 21.7600 1.4250 21.6850 1.4260 21.6190 1.4270 21.5610 1.4280 21.5130 1.4290 21.4690 1.4300 21.4340 1.4310 21.4030 1.4320 21.3790 1.4330 21.3580 1.4340 21.3410 1.4350 21.3290 1.4360 21.3220 1.4370 21.3200 1.4380 21.3210 1.4390 21.3250 1.4400 21.3320 1.4410 21.3390 1.4420 21.3510 1.4430 21.3650 1.4440 21.3790 1.4450 21.3960 1.4460 21.4130 1.4470 21.4300 1.4480 21.4490 1.4490 21.4710 1.4500 21.4950 1.4510 21.5190 1.4520 21.5420 1.4530 21.5650 1.4540 21.5880 1.4550 21.6160 1.4560 21.6490 1.4570 21.6830 1.4580 21.7230 1.4590 21.7640 1.4600 21.8070 1.4610 21.8550 1.4620 21.9070 1.4630 21.9630 1.4640 22.0270 1.4650 22.0940 1.4660 22.1670 1.4670 22.2460 1.4680 22.3290 1.4690 22.4160 1.4700 22.5090 1.4710 22.6080 1.4720 22.7090 1.4730 22.8130 1.4740 22.9210 1.4750 23.0280 1.4760 23.1400 1.4770 23.2580 1.4780 23.3740 1.4790 23.4910 1.4800 23.6090 1.4810 23.7270 1.4820 23.8470 1.4830 23.9690 1.4840 24.0920 1.4850 24.2170 1.4860 24.3410 1.4870 24.4640 1.4880 24.5870 1.4890 24.7070 1.4900 24.8290 1.4910 24.9530 1.4920 25.0760 1.4930 25.2010 1.4940 25.3230 1.4950 25.4460 1.4960 25.5660 1.4970 25.6810 1.4980 25.8000 1.4990 25.9190 1.5000 26.0390 1.5010 26.1630 1.5020 26.2830 1.5030 26.4030 1.5040 26.5220 1.5050 26.6390 1.5060 26.7570 1.5070 26.8740 1.5080 26.9900 1.5090 27.1050 1.5100 27.2200 1.5110 27.3330 1.5120 27.4470 1.5130 27.5580 1.5140 27.6670 1.5150 27.7750 1.5160 27.8810 1.5170 27.9860 1.5180 28.0910 1.5190 28.1930 1.5200 28.2960 1.5210 28.3990 1.5220 28.5020 1.5230 28.6020 1.5240 28.7020 1.5250 28.7970 1.5260 28.8910 1.5270 28.9860 1.5280 29.0810 1.5290 29.1740 1.5300 29.2670 1.5310 29.3580 1.5320 29.4440 1.5330 29.5340 1.5340 29.6230 1.5350 29.7140 1.5360 29.8060 1.5370 29.8960 1.5380 29.9820 1.5390 30.0670 1.5400 30.1490 1.5410 30.2300 1.5420 30.3110 1.5430 30.3920 1.5440 30.4720 1.5450 30.5510 1.5460 30.6300 1.5470 30.7060 1.5480 30.7790 1.5490 30.8530 1.5500 30.9230 1.5510 30.9920 1.5520 31.0600 1.5530 31.1270 1.5540 31.1940 1.5550 31.2630 1.5560 31.3330 1.5570 31.3980 1.5580 31.4610 1.5590 31.5250 1.5600 31.5860 1.5610 31.6510 1.5620 31.7180 1.5630 31.7790 1.5640 31.8400 1.5650 31.9000 1.5660 31.9560 1.5670 32.0140 1.5680 32.0740 1.5690 32.1300 1.5700 32.1880 1.5710 32.2480 1.5720 32.3010 1.5730 32.3530 1.5740 32.4020 1.5750 32.4490 1.5760 32.5000 1.5770 32.5520 1.5780 32.6040 1.5790 32.6530 1.5800 32.6990 1.5810 32.7440 1.5820 32.7870 1.5830 32.8320 1.5840 32.8760 1.5850 32.9190 1.5860 32.9610 1.5870 33.0010 1.5880 33.0450 1.5890 33.0890 1.5900 33.1330 1.5910 33.1800 1.5920 33.2260 1.5930 33.2690 1.5940 33.3110 1.5950 33.3520 1.5960 33.3900 1.5970 33.4300 1.5980 33.4710 1.5990 33.5100 1.6000 33.5480 1.6010 33.5860 1.6020 33.6230 1.6030 33.6620 1.6040 33.7020 1.6050 33.7420 1.6060 33.7830 1.6070 33.8210 1.6080 33.8540 1.6090 33.8870 1.6100 33.9180 1.6110 33.9480 1.6120 33.9820 1.6130 34.0120 1.6140 34.0380 1.6150 34.0640 1.6160 34.0880 1.6170 34.1120 1.6180 34.1330 1.6190 34.1560 1.6200 34.1770 1.6210 34.1990 1.6220 34.2240 1.6230 34.2480 1.6240 34.2720 1.6250 34.2950 1.6260 34.3150 1.6270 34.3330 1.6280 34.3490 1.6290 34.3620 1.6300 34.3710 1.6310 34.3810 1.6320 34.3890 1.6330 34.3970 1.6340 34.4060 1.6350 34.4110 1.6360 34.4160 1.6370 34.4170 1.6380 34.4140 1.6390 34.4110 1.6400 34.4040 1.6410 34.3940 1.6420 34.3850 1.6430 34.3700 1.6440 34.3490 1.6450 34.3250 1.6460 34.2960 1.6470 34.2630 1.6480 34.2340 1.6490 34.2110 1.6500 34.1860 1.6510 34.1610 1.6520 34.1340 1.6530 34.1010 1.6540 34.0750 
1.6550 34.0610 1.6560 34.0470 1.6570 34.0400 1.6580 34.0400 1.6590 34.0400 1.6600 34.0510 1.6610 34.0670 1.6620 34.0850 1.6630 34.1090 1.6640 34.1340 1.6650 34.1650 1.6660 34.1950 1.6670 34.2170 1.6680 34.2370 1.6690 34.2550 1.6700 34.2700 1.6710 34.2830 1.6720 34.2920 1.6730 34.2990 1.6740 34.3010 1.6750 34.3020 1.6760 34.3010 1.6770 34.2970 1.6780 34.2900 1.6790 34.2810 1.6800 34.2700 1.6810 34.2590 1.6820 34.2430 1.6830 34.2270 1.6840 34.2080 1.6850 34.1780 1.6860 34.1470 1.6870 34.1170 1.6880 34.0860 1.6890 34.0580 1.6900 34.0280 1.6910 33.9970 1.6920 33.9620 1.6930 33.9220 1.6940 33.8860 1.6950 33.8450 1.6960 33.8100 1.6970 33.7800 1.6980 33.7480 1.6990 33.7180 1.7000 33.6850 1.7010 33.6530 1.7020 33.6190 1.7030 33.5820 1.7040 33.5460 1.7050 33.5110 1.7060 33.4740 1.7070 33.4370 1.7080 33.3930 1.7090 33.3440 1.7100 33.2970 1.7110 33.2450 1.7120 33.1970 1.7130 33.1540 1.7140 33.1110 1.7150 33.0690 1.7160 33.0290 1.7170 32.9870 1.7180 32.9430 1.7190 32.9040 1.7200 32.8650 1.7210 32.8260 1.7220 32.7880 1.7230 32.7470 1.7240 32.7070 1.7250 32.6690 1.7260 32.6340 1.7270 32.6020 1.7280 32.5680 1.7290 32.5380 1.7300 32.5120 1.7310 32.4860 1.7320 32.4620 1.7330 32.4390 1.7340 32.4160 1.7350 32.3920 1.7360 32.3730 1.7370 32.3470 1.7380 32.3180 1.7390 32.2860 1.7400 32.2510 1.7410 32.2120 1.7420 32.1710 1.7430 32.1260 1.7440 32.0750 1.7450 32.0250 1.7460 31.9730 1.7470 31.9210 1.7480 31.8690 1.7490 31.8170 1.7500 31.7660 1.7510 31.7110 1.7520 31.6570 1.7530 31.6010 1.7540 31.5420 1.7550 31.4930 1.7560 31.4410 1.7570 31.3890 1.7580 31.3370 1.7590 31.2810 1.7600 31.2250 1.7610 31.1770 1.7620 31.1300 1.7630 31.0880 1.7640 31.0430 1.7650 30.9950 1.7660 30.9510 1.7670 30.9050 1.7680 30.8630 1.7690 30.8240 1.7700 30.7860 1.7710 30.7530 1.7720 30.7170 1.7730 30.6810 1.7740 30.6470 1.7750 30.6130 1.7760 30.5800 1.7770 30.5470 1.7780 30.5160 1.7790 30.4840 1.7800 30.4530 1.7810 30.4240 1.7820 30.3980 1.7830 30.3720 1.7840 30.3490 1.7850 30.3290 1.7860 30.3090 1.7870 30.2870 1.7880 30.2730 1.7890 30.2630 1.7900 30.2550 1.7910 30.2450 1.7920 30.2340 1.7930 30.2210 1.7940 30.2050 1.7950 30.1950 1.7960 30.1880 1.7970 30.1800 1.7980 30.1770 1.7990 30.1710 1.8000 30.1620 1.8010 30.1540 1.8020 30.1460 1.8030 30.1400 1.8040 30.1350 1.8050 30.1340 1.8060 30.1390 1.8070 30.1410 1.8080 30.1470 1.8090 30.1430 1.8100 30.1320 1.8110 30.1280 1.8120 30.1250 1.8130 30.1260 1.8140 30.1290 1.8150 30.1240 1.8160 30.1170 1.8170 30.1190 1.8180 30.1240 1.8190 30.1250 1.8200 30.1260 1.8210 30.1210 1.8220 30.1170 1.8230 30.1160 1.8240 30.1150 1.8250 30.1150 1.8260 30.1090 1.8270 30.1140 1.8280 30.1190 1.8290 30.1240 1.8300 30.1280 1.8310 30.1330 1.8320 30.1380 1.8330 30.1430 1.8340 30.1480 1.8350 30.1480 1.8360 30.1120 1.8370 30.0720 1.8380 30.0310 1.8390 29.9840 1.8400 29.9330 1.8410 29.8740 1.8420 29.8080 1.8430 29.7340 1.8440 29.6590 1.8450 29.5780 1.8460 29.4950 1.8470 29.4010 1.8480 29.2980 1.8490 29.1860 1.8500 29.0660 1.8510 28.9390 1.8520 28.8040 1.8530 28.6480 1.8540 28.4820 1.8550 28.3040 1.8560 28.1190 1.8570 27.9340 1.8580 27.7330 1.8590 27.5200 1.8600 27.3000 1.8610 27.0600 1.8620 26.8000 1.8630 26.5350 1.8640 26.2530 1.8650 25.9480 1.8660 25.6260 1.8670 25.2830 1.8680 24.9160 1.8690 24.5290 1.8700 24.1210 1.8710 23.6930 1.8720 23.2510 1.8730 22.7970 1.8740 22.3310 1.8750 21.8600 1.8760 21.3710 1.8770 20.8710 1.8780 20.3660 1.8790 19.8520 1.8800 19.3440 1.8810 18.8360 1.8820 18.3260 1.8830 17.8180 1.8840 17.3180 1.8850 16.8170 1.8860 16.3280 1.8870 15.8510 1.8880 15.3780 1.8890 14.9220 1.8900 14.4830 1.8910 14.0700 
1.8920 13.6720 1.8930 13.3060 1.8940 12.9570 1.8950 12.6340 1.8960 12.3400 1.8970 12.0620 1.8980 11.8120 1.8990 11.5840 1.9000 11.3690 1.9010 11.1700 1.9020 10.9910 1.9030 10.8320 1.9040 10.6870 1.9050 10.5600 1.9060 10.4490 1.9070 10.3520 1.9080 10.2720 1.9090 10.2030 1.9100 10.1400 1.9110 10.0860 1.9120 10.0440 1.9130 10.0010 1.9140 9.9640 1.9150 9.9340 1.9160 9.9080 1.9170 9.8860 1.9180 9.8720 1.9190 9.8620 1.9200 9.8530 1.9210 9.8510 1.9220 9.8500 1.9230 9.8510 1.9240 9.8540 1.9250 9.8610 1.9260 9.8660 1.9270 9.8730 1.9280 9.8850 1.9290 9.8980 1.9300 9.9130 1.9310 9.9300 1.9320 9.9520 1.9330 9.9740 1.9340 9.9980 1.9350 10.0210 1.9360 10.0460 1.9370 10.0700 1.9380 10.0960 1.9390 10.1190 1.9400 10.1440 1.9410 10.1740 1.9420 10.2030 1.9430 10.2390 1.9440 10.2740 1.9450 10.3130 1.9460 10.3490 1.9470 10.3830 1.9480 10.4220 1.9490 10.4610 1.9500 10.5020 1.9510 10.5460 1.9520 10.5940 1.9530 10.6420 1.9540 10.6950 1.9550 10.7450 1.9560 10.7930 1.9570 10.8440 1.9580 10.8930 1.9590 10.9470 1.9600 11.0090 1.9610 11.0660 1.9620 11.1220 1.9630 11.1770 1.9640 11.2340 1.9650 11.2900 1.9660 11.3500 1.9670 11.4150 1.9680 11.4780 1.9690 11.5410 1.9700 11.6050 1.9710 11.6680 1.9720 11.7320 1.9730 11.7970 1.9740 11.8640 1.9750 11.9290 1.9760 11.9970 1.9770 12.0640 1.9780 12.1330 1.9790 12.2060 1.9800 12.2830 1.9810 12.3600 1.9820 12.4400 1.9830 12.5180 1.9840 12.5980 1.9850 12.6770 1.9860 12.7560 1.9870 12.8360 1.9880 12.9150 1.9890 12.9910 1.9900 13.0720 1.9910 13.1520 1.9920 13.2310 1.9930 13.3100 1.9940 13.3910 1.9950 13.4720 1.9960 13.5510 1.9970 13.6300 1.9980 13.7110 1.9990 13.7910 2.0000 13.8720 2.0010 13.9520 2.0020 14.0380 2.0030 14.1230 2.0040 14.2010 2.0050 14.2820 2.0060 14.3680 2.0070 14.4480 2.0080 14.5260 2.0090 14.6040 2.0100 14.6780 2.0110 14.7540 2.0120 14.8340 2.0130 14.9090 2.0140 14.9810 2.0150 15.0590 2.0160 15.1340 2.0170 15.2120 2.0180 15.2830 2.0190 15.3510 2.0200 15.4180 2.0210 15.4800 2.0220 15.5470 2.0230 15.6210 2.0240 15.6870 2.0250 15.7480 2.0260 15.8080 2.0270 15.8620 2.0280 15.9170 2.0290 15.9770 2.0300 16.0390 2.0310 16.0970 2.0320 16.1570 2.0330 16.2110 2.0340 16.2590 2.0350 16.3100 2.0360 16.3610 2.0370 16.4100 2.0380 16.4620 2.0390 16.5080 2.0400 16.5440 2.0410 16.5870 2.0420 16.6320 2.0430 16.6750 2.0440 16.7160 2.0450 16.7590 2.0460 16.7990 2.0470 16.8340 2.0480 16.8720 2.0490 16.9130 2.0500 16.9510 2.0510 16.9860 2.0520 17.0240 2.0530 17.0620 2.0540 17.1000 2.0550 17.1350 2.0560 17.1760 2.0570 17.2180 2.0580 17.2610 2.0590 17.3020 2.0600 17.3450 2.0610 17.3830 2.0620 17.4140 2.0630 17.4520 2.0640 17.4900 2.0650 17.5300 2.0660 17.5750 2.0670 17.6150 2.0680 17.6510 2.0690 17.6880 2.0700 17.7190 2.0710 17.7550 2.0720 17.7950 2.0730 17.8330 2.0740 17.8710 2.0750 17.9110 2.0760 17.9470 2.0770 17.9790 2.0780 18.0150 2.0790 18.0520 2.0800 18.0880 2.0810 18.1280 2.0820 18.1630 2.0830 18.1930 2.0840 18.2230 2.0850 18.2480 2.0860 18.2750 2.0870 18.3020 2.0880 18.3320 2.0890 18.3560 2.0900 18.3780 2.0910 18.4000 2.0920 18.4180 2.0930 18.4410 2.0940 18.4650 2.0950 18.4920 2.0960 18.5210 2.0970 18.5510 2.0980 18.5820 2.0990 18.6150 2.1000 18.6560 2.1010 18.6860 2.1020 18.7160 2.1030 18.7390 2.1040 18.7540 2.1050 18.7610 2.1060 18.7730 2.1070 18.7820 2.1080 18.7970 2.1090 18.8160 2.1100 18.8310 2.1110 18.8500 2.1120 18.8650 2.1130 18.8770 2.1140 18.8880 2.1150 18.8980 2.1160 18.9080 2.1170 18.9190 2.1180 18.9260 2.1190 18.9300 2.1200 18.9380 2.1210 18.9420 2.1220 18.9510 2.1230 18.9600 2.1240 18.9770 2.1250 18.9780 2.1260 18.9680 2.1270 18.9690 2.1280 18.9650 2.1290 18.9710 
2.1300 18.9920 2.1310 19.0030 2.1320 19.0110 2.1330 19.0210 2.1340 19.0250 2.1350 19.0360 2.1360 19.0510 2.1370 19.0670 2.1380 19.0880 2.1390 19.1080 2.1400 19.1270 2.1410 19.1520 2.1420 19.1750 2.1430 19.2010 2.1440 19.2210 2.1450 19.2360 2.1460 19.2530 2.1470 19.2670 2.1480 19.2840 2.1490 19.3060 2.1500 19.3260 2.1510 19.3460 2.1520 19.3660 2.1530 19.3870 2.1540 19.4040 2.1550 19.4190 2.1560 19.4310 2.1570 19.4360 2.1580 19.4450 2.1590 19.4520 2.1600 19.4610 2.1610 19.4880 2.1620 19.5050 2.1630 19.5210 2.1640 19.5430 2.1650 19.5560 2.1660 19.5590 2.1670 19.5700 2.1680 19.5810 2.1690 19.5920 2.1700 19.6030 2.1710 19.6160 2.1720 19.6330 2.1730 19.6470 2.1740 19.6640 2.1750 19.6750 2.1760 19.6800 2.1770 19.6880 2.1780 19.6930 2.1790 19.7040 2.1800 19.7170 2.1810 19.7330 2.1820 19.7570 2.1830 19.7810 2.1840 19.8010 2.1850 19.8230 2.1860 19.8410 2.1870 19.8510 2.1880 19.8700 2.1890 19.8920 2.1900 19.9150 2.1910 19.9450 2.1920 19.9680 2.1930 19.9900 2.1940 20.0060 2.1950 20.0130 2.1960 20.0250 2.1970 20.0350 2.1980 20.0500 2.1990 20.0730 2.2000 20.0940 2.2010 20.1180 2.2020 20.1360 2.2030 20.1490 2.2040 20.1650 2.2050 20.1770 2.2060 20.1920 2.2070 20.2080 2.2080 20.2260 2.2090 20.2410 2.2100 20.2530 2.2110 20.2710 2.2120 20.2930 2.2130 20.3120 2.2140 20.3330 2.2150 20.3530 2.2160 20.3710 2.2170 20.3850 2.2180 20.3980 2.2190 20.4080 2.2200 20.4120 2.2210 20.4190 2.2220 20.4330 2.2230 20.4360 2.2240 20.4300 2.2250 20.4270 2.2260 20.4200 2.2270 20.4130 2.2280 20.4150 2.2290 20.4040 2.2300 20.3850 2.2310 20.3590 2.2320 20.3290 2.2330 20.2960 2.2340 20.2680 2.2350 20.2450 2.2360 20.2140 2.2370 20.1860 2.2380 20.1470 2.2390 20.1020 2.2400 20.0560 2.2410 20.0030 2.2420 19.9540 2.2430 19.9060 2.2440 19.8540 2.2450 19.7960 2.2460 19.7470 2.2470 19.7010 2.2480 19.6540 2.2490 19.6200 2.2500 19.5720 2.2510 19.5100 2.2520 19.4580 2.2530 19.4010 2.2540 19.3500 2.2550 19.3110 2.2560 19.2670 2.2570 19.2200 2.2580 19.1770 2.2590 19.1310 2.2600 19.0910 2.2610 19.0520 2.2620 19.0260 2.2630 18.9980 2.2640 18.9720 2.2650 18.9590 2.2660 18.9310 2.2670 18.8960 2.2680 18.8560 2.2690 18.8170 2.2700 18.7660 2.2710 18.7230 2.2720 18.6830 2.2730 18.6430 2.2740 18.6050 2.2750 18.5620 2.2760 18.5160 2.2770 18.4790 2.2780 18.4410 2.2790 18.4030 2.2800 18.3740 2.2810 18.3400 2.2820 18.2970 2.2830 18.2610 2.2840 18.2330 2.2850 18.1970 2.2860 18.1610 2.2870 18.1190 2.2880 18.0720 2.2890 18.0220 2.2900 17.9600 2.2910 17.9140 2.2920 17.8800 2.2930 17.8370 2.2940 17.8030 2.2950 17.7570 2.2960 17.6920 2.2970 17.6340 2.2980 17.5710 2.2990 17.5230 2.3000 17.4770 2.3010 17.4220 2.3020 17.3600 2.3030 17.3000 2.3040 17.2440 2.3050 17.1870 2.3060 17.1490 2.3070 17.1190 2.3080 17.0740 2.3090 17.0340 2.3100 17.0010 2.3110 16.9620 2.3120 16.9280 2.3130 16.9050 2.3140 16.8790 2.3150 16.8520 2.3160 16.8240 2.3170 16.7980 2.3180 16.7750 2.3190 16.7530 2.3200 16.7240 2.3210 16.7000 2.3220 16.6640 2.3230 16.6100 2.3240 16.5690 2.3250 16.5360 2.3260 16.4890 2.3270 16.4350 2.3280 16.3850 2.3290 16.3240 2.3300 16.2790 2.3310 16.2530 2.3320 16.2160 2.3330 16.1760 2.3340 16.1410 2.3350 16.1070 2.3360 16.0730 2.3370 16.0480 2.3380 16.0110 2.3390 15.9630 2.3400 15.9200 2.3410 15.8780 2.3420 15.8350 2.3430 15.7900 2.3440 15.7510 2.3450 15.7080 2.3460 15.6760 2.3470 15.6610 2.3480 15.6380 2.3490 15.6140 2.3500 15.5930 2.3510 15.5720 2.3520 15.5490 2.3530 15.5240 2.3540 15.5170 2.3550 15.4880 2.3560 15.4520 2.3570 15.4260 2.3580 15.3780 2.3590 15.3340 2.3600 15.3080 2.3610 15.2750 2.3620 15.2450 2.3630 15.2240 2.3640 15.1960 2.3650 15.1730 2.3660 15.1440 
2.3670 15.1040 2.3680 15.0720 2.3690 15.0360 2.3700 14.9970 2.3710 14.9600 2.3720 14.9290 2.3730 14.8800 2.3740 14.8270 2.3750 14.7940 2.3760 14.7380 2.3770 14.6720 2.3780 14.6160 2.3790 14.5490 2.3800 14.4920 2.3810 14.4770 2.3820 14.4570 2.3830 14.4170 2.3840 14.3720 2.3850 14.3140 2.3860 14.2420 2.3870 14.1830 2.3880 14.1440 2.3890 14.0920 2.3900 14.0420 2.3910 13.9890 2.3920 13.9210 2.3930 13.8640 2.3940 13.8200 2.3950 13.7840 2.3960 13.7510 2.3970 13.7180 2.3980 13.6810 2.3990 13.6200 2.4000 13.5680 2.4010 13.5300 2.4020 13.4710 2.4030 13.4350 2.4040 13.4160 2.4050 13.3490 2.4060 13.2680 2.4070 13.2130 2.4080 13.1500 2.4090 13.1030 2.4100 13.0990 2.4110 13.0850 2.4120 13.0600 2.4130 13.0370 2.4140 12.9700 2.4150 12.8800 2.4160 12.8010 2.4170 12.7510 2.4180 12.7340 2.4190 12.6910 2.4200 12.6330 2.4210 12.5880 2.4220 12.5050 2.4230 12.4440 2.4240 12.4290 2.4250 12.3530 2.4260 12.2840 2.4270 12.2550 2.4280 12.2000 2.4290 12.1340 2.4300 12.0620 2.4310 11.9810 2.4320 11.9090 2.4330 11.8700 2.4340 11.8270 2.4350 11.7920 2.4360 11.7560 2.4370 11.7150 2.4380 11.7100 2.4390 11.6670 2.4400 11.5930 2.4410 11.5140 2.4420 11.4290 2.4430 11.3860 2.4440 11.3490 2.4450 11.3010 2.4460 11.2670 2.4470 11.2290 2.4480 11.1860 2.4490 11.1570 2.4500 11.1240 2.4510 11.0630 2.4520 11.0140 2.4530 10.9980 2.4540 10.9560 2.4550 10.9000 2.4560 10.8560 2.4570 10.7990 2.4580 10.7250 2.4590 10.6770 2.4600 10.6630 2.4610 10.6180 2.4620 10.5980 2.4630 10.6150 2.4640 10.5800 2.4650 10.5600 2.4660 10.5500 2.4670 10.4610 2.4680 10.3740 2.4690 10.2810 2.4700 10.2160 2.4710 10.1980 2.4720 10.1510 2.4730 10.0830 2.4740 10.0350 2.4750 9.9830 2.4760 9.9630 2.4770 10.0050 2.4780 10.0350 2.4790 10.0510 2.4800 10.0350 2.4810 9.9870 2.4820 9.9230 2.4830 9.9110 2.4840 9.8840 2.4850 9.9120 2.4860 9.8880 2.4870 9.8590 2.4880 9.8820 2.4890 9.8370 2.4900 9.7580 2.4910 9.7410 2.4920 9.7010 2.4930 9.6720 2.4940 9.7790 2.4950 9.8060 2.4960 9.7570 2.4970 9.7400 2.4980 9.6600 2.4990 9.6030 2.5000 9.6530 spectral-0.22.4/spectral/tests/data/usgs/000077500000000000000000000000001412674721200202735ustar00rootroot00000000000000spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/000077500000000000000000000000001412674721200217555ustar00rootroot00000000000000spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/liba/000077500000000000000000000000001412674721200226645ustar00rootroot00000000000000spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/liba/ChapterB_b0/000077500000000000000000000000001412674721200247355ustar00rootroot00000000000000liba_Material_a_b0_0_ASDFRa_AREF.txt000077500000000000000000000007141412674721200331260ustar00rootroot00000000000000spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/liba/ChapterB_b0 liba Record=1: Material a b0 0 ASDFRa AREF 3.3849356e-001 3.3610347e-001 3.3743247e-001 3.3936349e-001 3.3469674e-001 3.3387077e-001 3.3453774e-001 3.3583394e-001 3.3526355e-001 3.3518928e-001 3.3557180e-001 3.3564091e-001 3.3513004e-001 3.3534676e-001 3.3781785e-001 3.3781108e-001 5.1171792e-001 5.1241201e-001 5.1288933e-001 5.1331484e-001 5.1393366e-001 5.1682192e-001 5.1465780e-001 5.1551914e-001 liba_Material_a_b0_1_NIC4a_RREF.txt000077500000000000000000000014401412674721200330030ustar00rootroot00000000000000spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/liba/ChapterB_b0 liba Record=21: Material_a_b0_1 NIC4a RREF -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 
-1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 liba_Material_a_b0_2-way_trans_RefStd_NIC4aa_TRAN.txt000077500000000000000000000014541412674721200364340ustar00rootroot00000000000000spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/liba/ChapterB_b0 liba Record=20: Materialab02-way_trans RefStd NIC4aa TRAN -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/liba/ChapterD_d0/000077500000000000000000000000001412674721200247415ustar00rootroot00000000000000liba_MaterialD_00_AVIRISb_RTGC.txt000077500000000000000000000003471412674721200326200ustar00rootroot00000000000000spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/liba/ChapterD_d0 liba Record=19: MaterialD 00 AVIRISb RTGC -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 5.3650327e-003 4.6643792e-003 4.7513070e-003 5.9810448e-003 2.6704246e-002 -1.2300000e+034 -1.2300000e+034 spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/liba/ChapterD_d0/liba_MaterialD_01_BECKa_AREF.txt000077500000000000000000000004201412674721200323540ustar00rootroot00000000000000 liba Record=18: MaterialD 01 BECKa AREF -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 1.6467798e-001 1.4743474e-001 1.3371170e-001 1.3586864e-001 1.3986646e-001 1.4692105e-001 -1.2300000e+034 -1.2300000e+034 -1.2300000e+034 spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/liba/liba_Bandpass_(FWHM)_ASDFR_SR.txt000077500000000000000000000007031412674721200304370ustar00rootroot00000000000000 liba Record=17: Bandpass (FWHM) ASDFR SR 8.6500905e-003 8.6194901e-003 8.5889800e-003 8.5585602e-003 8.5282298e-003 8.4979897e-003 8.4678298e-003 8.4377695e-003 8.4077902e-003 8.3779003e-003 8.3480999e-003 8.3183898e-003 8.2887597e-003 8.2592294e-003 8.2297893e-003 8.2004303e-003 8.1711598e-003 8.1419805e-003 1.1200000e-002 1.1200000e-002 1.1200000e-002 1.1200000e-002 1.1200000e-002 1.1200000e-002 spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/liba/liba_Bandpass_(FWHM)_AVIRIS_1996_um.txt000077500000000000000000000003621412674721200313630ustar00rootroot00000000000000 splib07a Record=16: Bandpass (FWHM) AVIRIS 1996 um 9.7999992e-003 9.7999992e-003 9.7999992e-003 9.7899996e-003 9.7799990e-003 9.7599998e-003 9.7399997e-003 9.2199994e-003 9.2099989e-003 9.1899997e-003 9.1799991e-003 
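The spectrum files above follow two simple plain-text layouts: the ECOSTRESS files pair a "Key: value" metadata header (Name, X Units, Number of X Values, ...) with two-column wavelength/reflectance data, while the USGS ASCII files use a one-line record description followed by values, with -1.2300000e+034 marking deleted channels. The package ingests whole directories of these files through spy.EcostressDatabase.create and spy.USGSDatabase.create, which the tests further below exercise; the following standalone sketch is only an illustration, assuming nothing beyond the layout visible above, of how a single ECOSTRESS-style file could be read directly (the helper name and the example path usage are hypothetical, not part of the package API).

import numpy as np

def read_spectrum_txt(path):
    """Return (metadata, wavelengths, values) from an ECOSTRESS-style text file."""
    metadata = {}
    samples = []
    with open(path) as fin:
        for line in fin:
            line = line.strip()
            if not line:
                continue
            if ':' in line:
                # Header line such as "X Units: Wavelength (micrometers)"
                key, _, value = line.partition(':')
                metadata[key.strip()] = value.strip()
            else:
                # Data line: whitespace-separated wavelength/value pairs
                samples.extend(float(tok) for tok in line.split())
    data = np.asarray(samples, dtype=float).reshape(-1, 2)
    x, y = data[:, 0], data[:, 1]
    # USGS-style files in this tree flag deleted channels with -1.23e+34;
    # ECOSTRESS files contain no such sentinel, so this filter is a no-op for them.
    keep = y > -1.0e33
    return metadata, x[keep], y[keep]

# Example usage against the ECOSTRESS test spectrum shown above
# (path relative to the package source tree):
# meta, wl, refl = read_spectrum_txt('spectral/tests/data/ecostress/c.spectrum.txt')
# print(meta['Name'], wl[0], refl[0])   # -> Acer rubrum 0.35 10.988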
spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/liba/liba_Bandpass_(FWHM)_BECK_Beckman_um.txt000077500000000000000000000004171412674721200320230ustar00rootroot00000000000000 liba Record=15: Bandpass (FWHM) BECK Beckman um 8.0000004e-003 8.0000004e-003 8.0000004e-003 7.0000002e-003 6.0000001e-003 6.0000001e-003 5.5000000e-003 4.9999999e-003 3.1999998e-002 3.1999998e-002 3.1999998e-002 3.1999998e-002 3.1999998e-002 spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/liba/liba_Bandpass_(FWHM)_NIC4_Nicolet_um.txt000077500000000000000000000014371412674721200320540ustar00rootroot00000000000000 liba Record=14: Bandpass (FWHM) NIC4 Nicolet um 5.0401688e-004 5.0437450e-004 5.0449371e-004 5.0473213e-004 5.0485134e-004 5.0508976e-004 5.0544739e-004 5.0556660e-004 5.0580502e-004 5.0604343e-004 5.0628185e-004 5.0652027e-004 5.0675869e-004 5.0699711e-004 5.0711632e-004 5.0735474e-004 5.0747395e-004 5.0783157e-004 5.0795078e-004 5.0818920e-004 5.0842762e-004 5.0866604e-004 5.0890446e-004 5.0914288e-004 5.0938129e-004 5.0950050e-004 5.0985813e-004 5.0997734e-004 5.1021576e-004 6.3992386e+000 6.7234116e+000 7.0727539e+000 7.4501801e+000 9.3079987e+000 9.8812256e+000 1.0509109e+001 1.1198944e+001 1.1958893e+001 1.2798935e+001 1.3730743e+001 1.4768204e+001 1.5927902e+001 1.7229858e+001 1.8698349e+001 spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/liba/liba_Wavelengths_ASD_0.35-2.5_um.txt000077500000000000000000000007061412674721200310500ustar00rootroot00000000000000 liba Record=13: Wavelengths ASD 0.35-2.5 um 3.4999999e-001 3.5100001e-001 3.5200000e-001 3.5299999e-001 3.5400000e-001 3.5499999e-001 3.5600001e-001 3.5699999e-001 3.5800001e-001 3.5900000e-001 3.6000001e-001 3.6100000e-001 3.6199999e-001 3.6300001e-001 3.6399999e-001 3.6500001e-001 2.4930000e+000 2.4940000e+000 2.4949999e+000 2.4960001e+000 2.4970000e+000 2.4979999e+000 2.4990001e+000 2.5000000e+000 spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/liba/liba_Wavelengths_AVIRIS_1996_0.37-2.5_um.txt000077500000000000000000000003611412674721200321250ustar00rootroot00000000000000 liba Record=12: Wavelengths AVIRIS 1996 0.37-2.5 um 3.7049001e-001 3.8021001e-001 3.8993001e-001 3.9967003e-001 4.0942001e-001 4.1918999e-001 4.2897001e-001 2.4778800e+000 2.4877899e+000 2.4976900e+000 2.5075898e+000 spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/liba/liba_Wavelengths_BECK_Beckman_0.2-3.0_um.txt000077500000000000000000000004231412674721200324670ustar00rootroot00000000000000 liba Record=11: Wavelengths BECK Beckman 0.2-3.0 um 2.0510000e-001 2.1310002e-001 2.2110000e-001 2.2910000e-001 2.3610000e-001 2.4210002e-001 2.4810001e-001 2.5360000e-001 2.8479998e+000 2.8799999e+000 2.9120002e+000 2.9440000e+000 2.9760001e+000 spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/liba/liba_Wavelengths_NIC4_Nicolet_1.12-216um.txt000077500000000000000000000014421412674721200324520ustar00rootroot00000000000000 liba Record=10: Wavelengths NIC4 Nicolet 1.12-216um 1.1225606e+000 1.1228037e+000 1.1230469e+000 1.1232901e+000 1.1235335e+000 1.1237770e+000 1.1240208e+000 1.1242644e+000 1.1245083e+000 1.1247522e+000 1.1249962e+000 1.1252402e+000 1.1254845e+000 1.1257288e+000 1.1259736e+000 1.1262181e+000 1.1264627e+000 1.1267076e+000 1.1269524e+000 1.1271973e+000 1.1274424e+000 1.1276876e+000 1.1279331e+000 1.1281785e+000 1.1284240e+000 1.1286696e+000 1.1289154e+000 1.1291611e+000 1.1294072e+000 9.9695114e+001 1.0164992e+002 1.0368291e+002 1.0579889e+002 1.0800303e+002 1.1030139e+002 1.1269925e+002 1.1520368e+002 1.7280489e+002 1.7876367e+002 1.8514809e+002 
1.9200540e+002 1.9939021e+002 2.0736581e+002 2.1600604e+002 spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/libc/000077500000000000000000000000001412674721200226665ustar00rootroot00000000000000spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/libc/ChapterB_b0/000077500000000000000000000000001412674721200247375ustar00rootroot00000000000000libc_Material_a_b0_0_ASDFRa_AREF.txt000077500000000000000000000002521412674721200331270ustar00rootroot00000000000000spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/libc/ChapterB_b0libc Record=9: Material a b0 0 ASDFRa AREF 3.4908235e-001 3.5491350e-001 3.6365876e-001 3.6831656e-001 3.7031063e-001 4.7417316e-001 5.1130873e-001 spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/libc/ChapterD_d0/000077500000000000000000000000001412674721200247435ustar00rootroot00000000000000spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/libc/ChapterD_d0/libc_MaterialD_1_ASDFRa_AREF.txt000077500000000000000000000002401412674721200324330ustar00rootroot00000000000000libc Record=8: Material D 1 ASDFRa AREF 1.6422345e-001 1.9375280e-001 2.4432163e-001 3.1562468e-001 4.9221447e-001 5.4595965e-001 3.7829378e-001 libc_MaterialD_2_AVIRISb_RTGC.txt000077500000000000000000000002411412674721200325370ustar00rootroot00000000000000spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/libc/ChapterD_d0libc Record=7: Material D 2 AVIRISb RTGC 1.0381651e-002 1.2283494e-002 2.1876834e-002 1.7211426e-002 1.3327430e-001 5.9285060e-002 3.1065322e-002 spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/libc/errorbars/000077500000000000000000000000001412674721200246675ustar00rootroot00000000000000spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/libc/errorbars/ignore.txt000066400000000000000000000000321412674721200267060ustar00rootroot00000000000000THIS_SHOULD_NOT_BE_PARSED libc_Bandpass_(FWHM)_Landsat8_(7_bands)_nm.txt000077500000000000000000000002611412674721200330060ustar00rootroot00000000000000spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/libc libc Record=6: Bandpass (FWHM) Landsat8 (7 bands) nm 1.5963058e+001 6.0072605e+001 5.7379478e+001 3.7491341e+001 2.8185131e+001 8.4664261e+001 1.8672125e+002 libc_Bandpass_(FWHM)_Landsat8_(7_bands)_um.txt000077500000000000000000000002611412674721200330150ustar00rootroot00000000000000spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/libc libc Record=5: Bandpass (FWHM) Landsat8 (7 bands) um 1.5963057e-002 6.0072605e-002 5.7379477e-002 3.7491340e-002 2.8185131e-002 8.4664263e-002 1.8672125e-001 spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/libc/libc_SRF_Band_2_Landsat8_Blue.txt000077500000000000000000000005521412674721200307310ustar00rootroot00000000000000 libc Record=4: SRF Band 2 Landsat8 Blue 0.0000000e+000 0.0000000e+000 0.0000000e+000 1.1700000e-004 2.4100000e-004 3.4900001e-004 4.5500000e-004 7.5599999e-004 3.7120001e-003 5.8605999e-002 1.3087600e-001 2.7136999e-001 4.9354199e-001 7.2397101e-001 0.0000000e+000 0.0000000e+000 0.0000000e+000 0.0000000e+000libc_Wavelengths_Landsat8_(7_bands)_microns.txt000077500000000000000000000002611412674721200336400ustar00rootroot00000000000000spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/libc libc Record=3: Wavelengths Landsat8 (7 bands) microns 4.4299144e-001 4.8326388e-001 5.6136143e-001 6.5470088e-001 8.6447239e-001 1.6095761e+000 2.2020326e+000 libc_Wavelengths_Landsat8oli_SpecRespFunction.txt000077500000000000000000000005541412674721200344130ustar00rootroot00000000000000spectral-0.22.4/spectral/tests/data/usgs/ASCIIdata/libc libc Record=2: Wavelengths Landsat8oli SpecRespFunction 
4.1999999e-001 4.2100000e-001 4.2199999e-001 4.2300001e-001 4.2399999e-001 4.2500001e-001 4.2600000e-001 4.2699999e-001 4.2800000e-001 2.3520000e+000 2.3529999e+000 2.3540001e+000 2.3550000e+000 2.3559999e+000 2.3570001e+000 2.3580000e+000 2.3590000e+000 2.3599999e+000 spectral-0.22.4/spectral/tests/database.py000066400000000000000000000147661412674721200205350ustar00rootroot00000000000000''' Runs unit tests of functions associated with spectral databases. To run the unit tests, type the following from the system command line: # python -m spectral.tests.database Note that the ECOSTRESS database must be requested so if the data files are not located on the local file system, these tests will be skipped. ''' from __future__ import absolute_import, division, print_function, unicode_literals import numpy as np import os from numpy.testing import assert_almost_equal import spectral as spy from spectral.io.aviris import read_aviris_bands from spectral.tests import testdir from spectral.tests.spytest import SpyTest ECOSTRESS_DATA_DIR = os.path.join(os.path.split(__file__)[0], 'data/ecostress') ECOSTRESS_DB = os.path.join(testdir, 'ecostress.db') USGS_DATA_DIR = os.path.join(os.path.split(__file__)[0], 'data/usgs/ASCIIdata') USGS_DB = os.path.join(testdir, 'usgs.db') AVIRIS_BAND_FILE = os.path.join(os.path.split(__file__)[0], 'data/92AV3C.spc') class ECOSTRESSDatabaseCreationTest(SpyTest): '''Tests ECOSTRESS database creation from text files.''' def __init__(self): pass def setup(self): if not os.path.isdir(testdir): os.makedirs(testdir) if os.path.exists(ECOSTRESS_DB): os.remove(ECOSTRESS_DB) def test_create_database(self): '''Test creating new database from ECOSTRESS data files.''' db = spy.EcostressDatabase.create(ECOSTRESS_DB, ECOSTRESS_DATA_DIR) assert(list(db.query('SELECT COUNT() FROM Spectra'))[0][0] == 3) class ECOSTRESSDatabaseTest(SpyTest): '''Tests that ECOSTRESS database works properly''' def __init__(self): pass def setup(self): self.db = spy.EcostressDatabase(ECOSTRESS_DB) def test_read_signatures(self): '''Can get spectra from the opened database.''' assert(list(self.db.query('SELECT COUNT() FROM Spectra'))[0][0] == 3) def test_create_envi_lib(self): '''Can resample spectra and create an ENVI spectral library.''' bands = read_aviris_bands(AVIRIS_BAND_FILE) cursor = self.db.query('SELECT SpectrumID FROM Spectra') ids = [r[0] for r in cursor] bands.centers = [x / 1000. for x in bands.centers] bands.bandwidths = [x / 1000. 
for x in bands.bandwidths] slib = self.db.create_envi_spectral_library(ids, bands) assert(slib.spectra.shape == (3, 220)) class USGSDatabaseCreationTest(SpyTest): '''Tests USGS database creation from text files.''' def __init__(self): pass def setup(self): if not os.path.isdir(testdir): os.makedirs(testdir) if os.path.exists(USGS_DB): os.remove(USGS_DB) def test_create_database(self): '''Test creating new database from USGS data files.''' db = spy.USGSDatabase.create(USGS_DB, USGS_DATA_DIR) assert(list(db.query('SELECT COUNT() FROM Samples'))[0][0] == 8) assert(list(db.query('SELECT COUNT() FROM SpectrometerData')) [0][0] == 13) class USGSDatabaseTest(SpyTest): '''Tests that USGS database works properly''' def __init__(self): pass def setup(self): self.db = spy.USGSDatabase(USGS_DB) def test_read_signatures(self): '''Can get spectra from the opened database.''' assert(list(self.db.query('SELECT COUNT() FROM Samples'))[0][0] == 8) assert(list(self.db.query('SELECT COUNT() FROM SpectrometerData')) [0][0] == 13) some_sample = list(self.db.query('''SELECT Chapter, FileName, AssumedWLSpmeterDataID, NumValues, MinValue, MaxValue FROM Samples WHERE LibName='liba' AND Record=1 AND Description='Material a b0 0 ASDFRa AREF' AND Spectrometer='ASDFR' AND Purity='a' AND MeasurementType='AREF' '''))[0] assert(some_sample[0] == 'ChapterB_b0') assert(some_sample[1] == 'liba_Material_a_b0_0_ASDFRa_AREF.txt') assert(some_sample[3] == 24) assert_almost_equal(some_sample[4], 0.33387077) assert_almost_equal(some_sample[5], 0.51682192) some_spectrometer_data = list(self.db.query('''SELECT LibName, Record, MeasurementType, Unit, Name, Description, FileName, NumValues, MinValue, MaxValue FROM SpectrometerData WHERE SpectrometerDataID=? ''', (some_sample[2],)))[0] assert(some_spectrometer_data[0] == 'liba') assert(some_spectrometer_data[1] == 13) assert(some_spectrometer_data[2] == 'Wavelengths') assert(some_spectrometer_data[3] == 'micrometer') assert(some_spectrometer_data[4] == 'ASD') assert(some_spectrometer_data[5] == 'Wavelengths ASD 0.35-2.5 um') assert(some_spectrometer_data[6] == 'liba_Wavelengths_ASD_0.35-2.5_um.txt') assert(some_spectrometer_data[7] == 24) assert_almost_equal(some_spectrometer_data[8], 0.35) assert_almost_equal(some_spectrometer_data[9], 2.5) def test_get_spectrum(self): some_sample_id = list(self.db.query('''SELECT SampleID FROM Samples WHERE LibName='libc' AND Description='Material D 2 AVIRISb RTGC' '''))[0][0] (x, y) = self.db.get_spectrum(some_sample_id) assert(len(x) == len(y)) assert(len(y) == 7) assert_almost_equal(y[0], 0.010381651) assert_almost_equal(x[-1], 2.2020326) def test_create_envi_lib(self): '''Can resample spectra and create an ENVI spectral library.''' bands = read_aviris_bands(AVIRIS_BAND_FILE) cursor = self.db.query('SELECT SampleID FROM Samples') ids = [r[0] for r in cursor] bands.centers = [x / 1000. for x in bands.centers] bands.bandwidths = [x / 1000. 
for x in bands.bandwidths] slib = self.db.create_envi_spectral_library(ids, bands) assert(slib.spectra.shape == (8, 220)) def run(): print('\n' + '-' * 72) print('Running database tests.') print('-' * 72) for T in [ECOSTRESSDatabaseCreationTest, ECOSTRESSDatabaseTest, USGSDatabaseCreationTest, USGSDatabaseTest]: T().run() if __name__ == '__main__': from spectral.tests.run import parse_args, reset_stats, print_summary import logging logging.getLogger('spectral').setLevel(logging.ERROR) parse_args() reset_stats() run() print_summary() spectral-0.22.4/spectral/tests/detectors.py000066400000000000000000000144671412674721200207630ustar00rootroot00000000000000''' Runs unit tests for various target detectors. To run the unit tests, type the following from the system command line: # python -m spectral.tests.detectors ''' from __future__ import absolute_import, division, print_function, unicode_literals import numpy as np from numpy.testing import assert_allclose import spectral as spy from spectral.tests.spytest import SpyTest class MatchedFilterTest(SpyTest): def setup(self): from spectral.algorithms.detectors import MatchedFilter self.data = spy.open_image('92AV3C.lan').load() self.background = spy.calc_stats(self.data) self.target_ij = [33, 87] # self.target = self.data[33, 87] (i, j) = self.target_ij self.mf = MatchedFilter(self.background, self.data[i, j]) def test_mf_bg_eq_zero(self): '''Matched Filter response of background should be zero.''' (i, j) = self.target_ij np.testing.assert_approx_equal(self.mf(self.background.mean), 0) def test_mf_target_eq_one(self): '''Matched Filter response of target should be one.''' from spectral.algorithms.detectors import matched_filter (i, j) = self.target_ij target = self.data[i, j] mf = matched_filter(self.data, target, self.background) np.testing.assert_approx_equal(mf[i, j], 1) def test_mf_target_no_bg_eq_one(self): '''Matched Filter response of target should be one.''' from spectral.algorithms.detectors import matched_filter (i, j) = self.target_ij target = self.data[i, j] mf = matched_filter(self.data, target) np.testing.assert_approx_equal(mf[i, j], 1) def test_mf_target_pixel_eq_one(self): '''Matched Filter response of target pixel should be one.''' (i, j) = self.target_ij np.testing.assert_approx_equal(self.mf(self.data)[i, j], 1) def test_mf_windowed_target_eq_one(self): '''Windowed Matched Filter response of target pixel should be one.''' X = self.data[:10, :10, :] ij = (3, 3) y = spy.matched_filter(X, X[ij], window=(3,7), cov=self.background.cov) np.allclose(1, y[ij]) class RXTest(SpyTest): def setup(self): self.data = spy.open_image('92AV3C.lan').load() self.background = spy.calc_stats(self.data) def test_rx_bg_eq_zero(self): from spectral.algorithms.detectors import rx, RX d = rx(self.data) stats = spy.calc_stats(self.data) np.testing.assert_approx_equal(rx(stats.mean, background=stats), 0) class ACETest(SpyTest): def setup(self): self.data = spy.open_image('92AV3C.lan').load() self.bg = spy.calc_stats(self.data) self.X = self.data[:20, :20, :] def test_ace_bg_eq_zero(self): '''ACE score of background mean should be zero.''' ij = (10, 10) y = spy.ace(self.bg.mean, self.X[ij], background=self.bg) assert(np.allclose(0, y)) def test_ace_pixel_target_eq_one(self): '''ACE score of target should be one for single pixel arg.''' ij = (10, 10) y = spy.ace(self.X[ij], self.X[ij], background=self.bg) assert(np.allclose(1, y)) def test_ace_novec_pixel_target_eq_one(self): '''ACE score of target should be one for single pixel arg.''' ij = (10, 10) y = 
spy.ace(self.X[ij], self.X[ij], background=self.bg, vectorize=False) assert(np.allclose(1, y)) def test_ace_target_eq_one(self): '''ACE score of target should be one.''' ij = (10, 10) y = spy.ace(self.X, self.X[ij], background=self.bg) assert(np.allclose(1, y[ij])) def test_ace_novec_target_eq_one(self): '''ACE score (without vectorization) of target should be one.''' ij = (10, 10) y = spy.ace(self.X, self.X[ij], background=self.bg, vectorize=False) assert(np.allclose(1, y[ij])) def test_ace_multi_targets_eq_one(self): '''ACE score of multiple targets should each be one.''' ij1 = (10, 10) ij2 = (3, 12) y = spy.ace(self.X, [self.X[ij1], self.X[ij2]], background=self.bg) assert(np.allclose(1, [y[ij1][0], y[ij2][1]])) def test_ace_novec_multi_targets_eq_one(self): '''ACE score of multiple targets should each be one.''' ij1 = (10, 10) ij2 = (3, 12) y = spy.ace(self.X, [self.X[ij1], self.X[ij2]], background=self.bg, vectorize=False) assert(np.allclose(1, [y[ij1][0], y[ij2][1]])) def test_ace_multi_targets_bg_eq_zero(self): '''ACE score of background for multiple targets should be one.''' ij1 = (10, 10) ij2 = (3, 12) y = spy.ace(self.bg.mean, [self.X[ij1], self.X[ij2]], background=self.bg) assert(np.allclose(0, y)) def test_ace_subspace_targets_eq_one(self): '''ACE score of targets defining target subspace should each be one.''' ij1 = (10, 10) ij2 = (3, 12) y = spy.ace(self.X, np.array([self.X[ij1], self.X[ij2]]), background=self.bg) assert(np.allclose(1, [y[ij1], y[ij2]])) def test_ace_novec_subspace_targets_eq_one(self): '''ACE score of targets defining target subspace should each be one.''' ij1 = (10, 10) ij2 = (3, 12) y = spy.ace(self.X, np.array([self.X[ij1], self.X[ij2]]), background=self.bg, vectorize=False) assert(np.allclose(1, [y[ij1], y[ij2]])) def test_ace_subspace_bg_eq_zero(self): '''ACE score of background for target subspace should be zero.''' ij1 = (10, 10) ij2 = (3, 12) y = spy.ace(self.bg.mean, np.array([self.X[ij1], self.X[ij2]]), background=self.bg) assert(np.allclose(0, y)) def test_ace_windowed_target_eq_one(self): '''ACE score of target for windowed background should be one.''' ij = (10, 10) y = spy.ace(self.X, self.X[ij], window=(3,7), cov=self.bg.cov) assert(np.allclose(1, y[ij])) def run(): print('\n' + '-' * 72) print('Running target detector tests.') print('-' * 72) for T in [MatchedFilterTest, RXTest, ACETest]: T().run() if __name__ == '__main__': from spectral.tests.run import parse_args, reset_stats, print_summary parse_args() reset_stats() run() print_summary() spectral-0.22.4/spectral/tests/dimensionality.py000066400000000000000000000135751412674721200220160ustar00rootroot00000000000000''' Runs unit tests for dimensionality reduction algorithms. 
To run the unit tests, type the following from the system command line: # python -m spectral.tests.dimensionality ''' from __future__ import absolute_import, division, print_function, unicode_literals import numpy as np import spectral as spy from spectral.tests.spytest import SpyTest, test_method class DimensionalityTest(SpyTest): '''Tests various math functions.''' def setup(self): self.data = spy.open_image('92AV3C.lan').load() def test_mnf_all_equals_data(self): '''Test that MNF transform with all components equals original data.''' data = self.data signal = spy.calc_stats(data) noise = spy.noise_from_diffs(data[117: 137, 85: 122, :]) mnfr = spy.mnf(signal, noise) denoised = mnfr.denoise(data, num=data.shape[-1]) assert(np.allclose(denoised, data)) def test_ppi(self): '''Tests that ppi function runs''' data = self.data p = spy.ppi(data, 4) def test_ppi_threshold(self): '''Tests that ppi function runs with threshold arg''' data = self.data p = spy.ppi(data, 4, 10) def test_ppi_continues(self): '''Tests that running ppi with initial indices works as expected.''' data = self.data s = np.random.get_state() p = spy.ppi(data, 4) np.random.set_state(s) p2 = spy.ppi(data, 2) p2 = spy.ppi(data, 2, start=p2) assert(np.all(p == p2)) def test_ppi_centered(self): '''Tests that ppi with mean-subtracted data works as expected.''' data = self.data s = np.random.get_state() p = spy.ppi(data, 4) np.random.set_state(s) data_centered = data - spy.calc_stats(data).mean p2 = spy.ppi(data_centered, 4) assert(np.all(p == p2)) def test_smacc_minimal(self): '''Tests smacc correctness on minimal example.''' H = np.array([ [1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [1.0, 1.0, 0.0], [0.0, 1.0, 1.0] ]) S, F, R = spy.smacc(H) assert(np.allclose(np.matmul(F, S) + R, H)) assert(np.min(F) == 0.0) expected_S = np.array([ # First two longer ones. [1., 1., 0.], [0., 1., 1.], # First of the two shorted ones. Other can be expressed other 3. [1., 0., 0.], ]) assert(np.array_equal(S, expected_S)) def test_smacc_runs(self): '''Tests that smacc runs without additional arguments.''' # Without scaling numeric errors accumulate. scaled_data = self.data / 10000 S, F, R = spy.smacc(scaled_data) data_shape = scaled_data.shape H = scaled_data.reshape(data_shape[0] * data_shape[1], data_shape[2]) assert(np.allclose(np.matmul(F, S) + R, H)) assert(np.min(F) == 0.0) assert(len(S.shape) == 2 and S.shape[0] == 9 and S.shape[1] == 220) def test_smacc_min_endmembers(self): '''Tests that smacc runs with min_endmember argument.''' # Without scaling numeric errors accumulate. scaled_data = self.data / 10000 S, F, R = spy.smacc(scaled_data, 10) data_shape = scaled_data.shape H = scaled_data.reshape(data_shape[0] * data_shape[1], data_shape[2]) assert(np.allclose(np.matmul(F, S) + R, H)) assert(np.min(F) == 0.0) assert(len(S.shape) == 2 and S.shape[0] == 10 and S.shape[1] == 220) def test_smacc_max_residual_norm(self): '''Tests that smacc runs with max_residual_norm argument.''' # Without scaling numeric errors accumulate. 
scaled_data = self.data / 10000 S, F, R = spy.smacc(scaled_data, 9, 0.8) data_shape = scaled_data.shape H = scaled_data.reshape(data_shape[0] * data_shape[1], data_shape[2]) assert(np.allclose(np.matmul(F, S) + R, H)) assert(np.min(F) == 0.0) residual_norms = np.einsum('ij,ij->i', R, R) assert(np.max(residual_norms) <= 0.8) def test_pca_runs(self): '''Should be able to compute PCs and transform data.''' data = self.data xdata = spy.principal_components(data).transform(data) def test_pca_runs_from_stats(self): '''Should be able to pass image stats to PCA function.''' data = self.data stats = spy.calc_stats(data) xdata = spy.principal_components(stats).transform(data) def test_orthogonalize(self): '''Can correctly create an orthogonal basis from vectors.''' x = np.linspace(0, np.pi, 1001) # Create sin and cos vectors of unit length sin_h = np.sin(x) sin_h /= np.linalg.norm(sin_h) cos_h = np.cos(x) cos_h /= np.linalg.norm(cos_h) X = np.array([50 * sin_h, 75 * cos_h]) Y = spy.orthogonalize(X) assert(np.allclose(Y.dot(Y.T), np.array([[1, 0], [0, 1]]))) assert(np.allclose(X.dot(Y.T), np.array([[50, 0], [0, 75]]))) def test_orthogonalize_subset(self): '''Can correctly create an orthogonal basis from vector subset.''' x = np.linspace(0, np.pi, 1001) # Create sin and cos vectors of unit length sin_h = np.sin(x) sin_h /= np.linalg.norm(sin_h) cos_h = np.cos(x) cos_h /= np.linalg.norm(cos_h) # First vector in X will already be a unit vector X = np.array([sin_h, 75 * cos_h]) Y = spy.orthogonalize(X, start=1) assert(np.allclose(Y.dot(Y.T), np.array([[1, 0], [0, 1]]))) assert(np.allclose(X.dot(Y.T), np.array([[1, 0], [0, 75]]))) def run(): print('\n' + '-' * 72) print('Running dimensionality tests.') print('-' * 72) test = DimensionalityTest() test.run() if __name__ == '__main__': from spectral.tests.run import parse_args, reset_stats, print_summary parse_args() reset_stats() run() print_summary() spectral-0.22.4/spectral/tests/envi.py000066400000000000000000000271211412674721200177170ustar00rootroot00000000000000''' Runs unit tests of functions associated with the ENVI file format. 
To run the unit tests, type the following from the system command line: # python -m spectral.tests.envi ''' from __future__ import absolute_import, division, print_function, unicode_literals import numpy as np from numpy.testing import assert_almost_equal import os import spectral as spy from spectral.io.envi import SpectralLibrary from spectral.tests import testdir from spectral.tests.spytest import SpyTest MIXED_CASE_HEADER = '''ENVI samples = 145 lines = 145 bands = 220 header offset = 0 file type = ENVI Standard data type = 4 interleave = bip byte order = 0 some Param = 0 ''' class ENVIWriteTest(SpyTest): '''Tests that SpyFile memmap interfaces read and write properly.''' def __init__(self): pass def setup(self): if not os.path.isdir(testdir): os.makedirs(testdir) def test_save_image_ndarray(self): '''Test saving an ENVI formated image from a numpy.ndarray.''' (R, B, C) = (10, 20, 30) (r, b, c) = (3, 8, 23) datum = 33 data = np.zeros((R, B, C), dtype=np.uint16) data[r, b, c] = datum fname = os.path.join(testdir, 'test_save_image_ndarray.hdr') spy.envi.save_image(fname, data, interleave='bil') img = spy.open_image(fname) assert_almost_equal(img[r, b, c], datum) def test_save_image_ndarray_no_ext(self): '''Test saving an ENVI formated image with no image file extension.''' data = np.arange(1000, dtype=np.int16).reshape(10, 10, 10) base = os.path.join(testdir, 'test_save_image_ndarray_noext') hdr_file = base + '.hdr' spy.envi.save_image(hdr_file, data, ext='') rdata = spy.open_image(hdr_file).load() assert(np.all(data==rdata)) def test_save_image_ndarray_alt_ext(self): '''Test saving an ENVI formated image with alternate extension.''' data = np.arange(1000, dtype=np.int16).reshape(10, 10, 10) base = os.path.join(testdir, 'test_save_image_ndarray_alt_ext') hdr_file = base + '.hdr' ext = '.foo' img_file = base + ext spy.envi.save_image(hdr_file, data, ext=ext) rdata = spy.envi.open(hdr_file, img_file).load() assert(np.all(data==rdata)) def test_save_image_spyfile(self): '''Test saving an ENVI formatted image from a SpyFile object.''' (r, b, c) = (3, 8, 23) fname = os.path.join(testdir, 'test_save_image_spyfile.hdr') src = spy.open_image('92AV3C.lan') spy.envi.save_image(fname, src) img = spy.open_image(fname) assert_almost_equal(src[r, b, c], img[r, b, c]) def test_create_image_metadata(self): '''Test calling `envi.create_image` using a metadata dict.''' (R, B, C) = (10, 20, 30) (r, b, c) = (3, 8, 23) offset = 1024 datum = 33 md = {'lines': R, 'samples': B, 'bands': C, 'interleave': 'bsq', 'header offset': offset, 'data type': 12, 'USER DEFINED': 'test case insensitivity'} fname = os.path.join(testdir, 'test_create_image_metadata.hdr') img = spy.envi.create_image(fname, md) mm = img.open_memmap(writable=True) mm.fill(0) mm[r, b, c] = datum mm.flush() img = spy.open_image(fname) img._disable_memmap() assert_almost_equal(img[r, b, c], datum) assert(img.offset == offset) for key in md: assert key.lower() in img.metadata assert str(md[key]) == img.metadata[key.lower()] def test_create_image_keywords(self): '''Test calling `envi.create_image` using keyword args.''' (R, B, C) = (10, 20, 30) (r, b, c) = (3, 8, 23) offset = 1024 datum = 33 fname = os.path.join(testdir, 'test_create_image_keywords.hdr') img = spy.envi.create_image(fname, shape=(R,B,C), interleave='bsq', dtype=np.uint16, offset=offset) mm = img.open_memmap(writable=True) mm.fill(0) mm[r, b, c] = datum mm.flush() img = spy.open_image(fname) img._disable_memmap() assert_almost_equal(img[r, b, c], datum) assert(img.offset == offset) def 
test_save_invalid_dtype_fails(self):
        '''Should not be able to write unsupported data type to file.'''
        from spectral.io.envi import EnviDataTypeError
        a = np.random.randint(0, 200, 900).reshape((30, 30)).astype(np.int8)
        fname = os.path.join(testdir, 'test_save_invalid_dtype_fails.hdr')
        try:
            spy.envi.save_image(fname, a)
        except EnviDataTypeError:
            pass
        else:
            raise Exception('Expected EnviDataTypeError to be raised.')

    def test_save_load_classes(self):
        '''Verify that `envi.save_classification` saves data correctly.'''
        fname = os.path.join(testdir, 'test_save_load_classes.hdr')
        gt = spy.open_image('92AV3GT.GIS').read_band(0)
        spy.envi.save_classification(fname, gt, dtype=np.uint8)
        gt2 = spy.open_image(fname).read_band(0)
        assert(np.all(gt == gt2))

    def test_open_nonzero_frame_offset_fails(self):
        '''Opening files with nonzero frame offsets should fail.'''
        img = spy.open_image('92AV3C.lan')
        fname = os.path.join(testdir, 'test_open_nonzero_frame_offset_fails.hdr')
        spy.envi.save_image(fname, img)
        fout = open(fname, 'a')
        fout.write('major frame offsets = 128\n')
        fout.close()
        try:
            img2 = spy.envi.open(fname)
        except spy.envi.EnviFeatureNotSupported:
            pass
        else:
            raise Exception('File erroneously opened.')

    def test_open_zero_frame_offset_passes(self):
        '''Files with frame offsets set to zero should open.'''
        img = spy.open_image('92AV3C.lan')
        fname = os.path.join(testdir, 'test_open_zero_frame_offset_passes.hdr')
        spy.envi.save_image(fname, img)
        fout = open(fname, 'a')
        fout.write('major frame offsets = 0\n')
        fout.write('minor frame offsets = {0, 0}\n')
        fout.close()
        img2 = spy.envi.open(fname)

    def test_save_nonzero_frame_offset_fails(self):
        '''Saving files with nonzero frame offsets should fail.'''
        img = spy.open_image('92AV3C.lan')
        fname = os.path.join(testdir, 'test_save_nonzero_frame_offset_fails.hdr')
        meta = {'major frame offsets' : [128, 0]}
        try:
            spy.envi.save_image(fname, img, metadata=meta)
        except spy.envi.EnviFeatureNotSupported:
            pass
        else:
            raise Exception('File erroneously saved.')

    def test_save_zero_frame_offset_passes(self):
        '''Saving files with frame offsets set to zero should succeed.'''
        img = spy.open_image('92AV3C.lan')
        fname = os.path.join(testdir, 'test_save_zero_frame_offset_passes.hdr')
        meta = {'major frame offsets' : 0}
        spy.envi.save_image(fname, img, metadata=meta)

    def test_catch_parse_error(self):
        '''Failure to parse parameters should raise EnviHeaderParsingError.'''
        img = spy.open_image('92AV3C.lan')
        fname = os.path.join(testdir, 'test_catch_parse_error.hdr')
        spy.envi.save_image(fname, img)
        fout = open(fname, 'a')
        fout.write('foo = {{\n')
        fout.close()
        try:
            img2 = spy.envi.open(fname)
        except spy.envi.EnviHeaderParsingError:
            pass
        else:
            raise Exception('Failed to raise EnviHeaderParsingError')

    def test_header_missing_mandatory_parameter_fails(self):
        '''Missing mandatory parameter should raise MissingEnviHeaderParameter.'''
        img = spy.open_image('92AV3C.lan')
        fname = os.path.join(testdir, 'test_missing_param_fails.hdr')
        spy.envi.save_image(fname, img)
        lines = [line for line in open(fname).readlines()
                 if 'bands' not in line]
        fout = open(fname, 'w')
        for line in lines:
            fout.write(line)
        fout.close()
        try:
            img2 = spy.envi.open(fname)
        except spy.envi.MissingEnviHeaderParameter:
            pass
        else:
            raise Exception('Failed to raise MissingEnviHeaderParameter')

    def test_param_name_converted_to_lower_case(self):
        '''By default, parameter names are converted to lower case.'''
        header = 'mixed_case_header.hdr'
        open(header, 'w').write(MIXED_CASE_HEADER)
        h = spy.envi.read_envi_header(header)
        assert('some param' in h)
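    # ------------------------------------------------------------------
    # Illustrative sketch (not part of the original test suite): the basic
    # `envi.create_image` call pattern exercised by the tests above. The
    # output file name is hypothetical; `np`, `os`, `spy` and `testdir` are
    # the names already imported at the top of this module. This helper is
    # never collected by SpyTest.run() because its name does not start with
    # "test_".
    def example_create_image_sketch(self):
        md = {'lines': 10, 'samples': 20, 'bands': 30,
              'interleave': 'bip', 'data type': 12}      # ENVI type 12 -> uint16
        fname = os.path.join(testdir, 'example_create_image.hdr')
        img = spy.envi.create_image(fname, md)
        mm = img.open_memmap(writable=True)
        mm.fill(0)
        mm.flush()
        return spy.open_image(fname)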
    def test_support_nonlowercase_params(self):
        '''Parameter case is preserved when envi_support_nonlowercase_params
        is enabled in settings.'''
        header = 'mixed_case_header.hdr'
        open(header, 'w').write(MIXED_CASE_HEADER)
        orig = spy.settings.envi_support_nonlowercase_params
        try:
            spy.settings.envi_support_nonlowercase_params = True
            h = spy.envi.read_envi_header(header)
        finally:
            spy.settings.envi_support_nonlowercase_params = orig
        assert('some Param' in h)

    def test_missing_ENVI_in_header_fails(self):
        '''FileNotAnEnviHeader should be raised if "ENVI" not on first line.'''
        img = spy.open_image('92AV3C.lan')
        fname = os.path.join(testdir, 'test_header_missing_ENVI_fails.hdr')
        spy.envi.save_image(fname, img)
        lines = open(fname).readlines()
        fout = open(fname, 'w')
        for line in lines[1:]:
            fout.write(line)
        fout.close()
        try:
            img2 = spy.envi.open(fname)
        except spy.envi.FileNotAnEnviHeader:
            pass
        else:
            raise Exception('Failed to raise FileNotAnEnviHeader')

    def test_open_missing_data_raises_envidatafilenotfounderror(self):
        '''EnviDataFileNotFoundError should be raised if the data file is not found.'''
        img = spy.open_image('92AV3C.lan')
        fname = os.path.join(testdir, 'header_without_data.hdr')
        spy.envi.save_image(fname, img, ext='.img')
        os.unlink(os.path.splitext(fname)[0] + '.img')
        try:
            img2 = spy.envi.open(fname)
        except spy.envi.EnviDataFileNotFoundError:
            pass
        else:
            raise Exception('Expected EnviDataFileNotFoundError')

    def test_create_spectral_lib_with_header(self):
        '''Can create ENVI spectral library from numpy array with bands.'''
        img = spy.open_image('92AV3C.lan')
        (nrows, ncols, nbands) = img.shape
        header = {'wavelength': np.arange(nbands).astype(np.float32)}
        slib = SpectralLibrary(img[0, :20, :].squeeze(), header)
        basename = os.path.join(testdir, 'slib')
        slib.save(basename)
        slib = spy.envi.open(basename + '.hdr')
        assert(slib.spectra.shape == (20, nbands))

    def test_create_spectral_lib_without_header(self):
        '''Can create ENVI spectral library from numpy array without bands.'''
        img = spy.open_image('92AV3C.lan')
        (nrows, ncols, nbands) = img.shape
        slib = SpectralLibrary(img[0, :20, :].squeeze())
        basename = os.path.join(testdir, 'slib')
        slib.save(basename)
        slib = spy.envi.open(basename + '.hdr')
        assert(slib.spectra.shape == (20, nbands))


def run():
    print('\n' + '-' * 72)
    print('Running ENVI tests.')
    print('-' * 72)
    write_test = ENVIWriteTest()
    write_test.run()


if __name__ == '__main__':
    from spectral.tests.run import parse_args, reset_stats, print_summary
    parse_args()
    reset_stats()
    run()
    print_summary()
spectral-0.22.4/spectral/tests/iterators.py000066400000000000000000000072441412674721200207760ustar00rootroot00000000000000'''
Runs unit tests for iterators.
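The functions under test are `iterator` and `iterator_ij` from
`spectral.algorithms.algorithms`, which yield per-pixel spectra (or (row, col)
indices) for a given ground-truth class. An illustrative call pattern,
mirroring the tests below (assumes the sample files are on the SPECTRAL_DATA
path):

    import numpy as np
    import spectral as spy
    from spectral.algorithms.algorithms import iterator

    data = spy.open_image('92AV3C.lan').load()
    gt = spy.open_image('92AV3GT.GIS').read_band(0)
    class_mean = np.mean([x for x in iterator(data, gt, 5)], axis=0)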
To run the unit tests, type the following from the system command line: # python -m spectral.tests.iterators ''' from __future__ import absolute_import, division, print_function, unicode_literals import numpy as np from numpy.testing import assert_allclose import spectral as spy from spectral.algorithms.algorithms import iterator, iterator_ij from spectral.tests.spytest import SpyTest class IteratorTest(SpyTest): '''Tests various math functions.''' def setup(self): self.image = spy.open_image('92AV3C.lan') self.gt = spy.open_image('92AV3GT.GIS').read_band(0) def test_iterator_all(self): '''Iteration over all pixels.''' data = self.image.load() pixels = data.reshape((-1, data.shape[-1])) sum = np.sum(pixels, 0) itsum = np.sum(np.array([x for x in iterator(data)]), 0) assert_allclose(sum, itsum) def test_iterator_nonzero(self): '''Iteration over all non-background pixels.''' data = self.image.load() classes = self.gt.ravel() pixels = data.reshape((-1, data.shape[-1])) sum = np.sum(pixels[classes > 0], 0) itsum = np.sum(np.array([x for x in iterator(data, self.gt)]), 0) assert_allclose(sum, itsum) def test_iterator_index(self): '''Iteration over single ground truth index''' cls = 5 data = self.image.load() classes = self.gt.ravel() pixels = data.reshape((-1, data.shape[-1])) sum = np.sum(pixels[classes == cls], 0) itsum = np.sum(np.array([x for x in iterator(data, self.gt, cls)]), 0) assert_allclose(sum, itsum) def test_iterator_ij_nonzero(self): '''Iteration over all non-background pixels.''' data = self.image.load() classes = self.gt.ravel() pixels = data.reshape((-1, data.shape[-1])) sum = np.sum(pixels[classes > 0], 0) itsum = np.sum(np.array([data[ij] for ij in iterator_ij(self.gt)]), 0) assert_allclose(sum, itsum) def test_iterator_ij_index(self): '''Iteration over single ground truth index''' cls = 5 data = self.image.load() classes = self.gt.ravel() pixels = data.reshape((-1, data.shape[-1])) sum = np.sum(pixels[classes == cls], 0) itsum = np.sum(np.array([data[ij] for ij in iterator_ij(self.gt, cls)]), 0) assert_allclose(sum, itsum) def test_iterator_spyfile(self): '''Iteration over SpyFile object for single ground truth index''' cls = 5 data = self.image.load() classes = self.gt.ravel() pixels = data.reshape((-1, data.shape[-1])) sum = np.sum(pixels[classes == cls], 0) itsum = np.sum(np.array([x for x in iterator(self.image, self.gt, cls)]), 0) assert_allclose(sum, itsum) def test_iterator_spyfile_nomemmap(self): '''Iteration over SpyFile object without memmap''' cls = 5 data = self.image.load() classes = self.gt.ravel() pixels = data.reshape((-1, data.shape[-1])) sum = np.sum(pixels[classes == cls], 0) image = spy.open_image('92AV3C.lan') itsum = np.sum(np.array([x for x in iterator(image, self.gt, cls)]), 0) assert_allclose(sum, itsum) def run(): print('\n' + '-' * 72) print('Running iterator tests.') print('-' * 72) test = IteratorTest() test.run() if __name__ == '__main__': from spectral.tests.run import parse_args, reset_stats, print_summary parse_args() reset_stats() run() print_summary() spectral-0.22.4/spectral/tests/memmap.py000066400000000000000000000131451412674721200202330ustar00rootroot00000000000000''' Runs unit tests of image file interfaces using numpy memmaps. The unit tests in this module assume the example file "92AV3C.lan" is in the spectral data path. After the file is opened it is saved in various formats (different combinations of byte order, interleave, and data type) and for each file written, the memmap interfaces are tested. 
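The interface under test is `SpyFile.open_memmap`, which returns a numpy
memmap view of the image file in a requested interleave. An illustrative call
pattern (not part of the tests themselves; assumes the sample file is on the
SPECTRAL_DATA path):

    import spectral as spy

    img = spy.open_image('92AV3C.lan')
    mm = img.open_memmap(interleave='bip', writable=False)
    value = mm[30, 40, 50]          # (row, column, band) in BIP ordering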
To run the unit tests, type the following from the system command line: # python -m spectral.tests.memmap ''' from __future__ import absolute_import, division, print_function, unicode_literals import numpy as np from numpy.testing import assert_almost_equal import spectral as spy from spectral.io.spyfile import find_file_path, FileNotFoundError from spectral.tests import testdir from spectral.tests.spytest import SpyTest class SpyFileMemmapTest(SpyTest): '''Tests that SpyFile memmap interfaces read and write properly.''' def __init__(self, file, datum, value, src_inter): ''' Arguments: `file` (str or `SpyFile`): The SpyFile to be tested. This can be either the name of the file or a SpyFile object that has already been opened. `datum` (3-tuple of ints): (i, j, k) are the row, column and band of the datum to be tested. 'i' and 'j' should be at least 10 pixels away from the edge of the associated image and `k` should have at least 10 bands above and below it in the image. `value` (int or float): The scalar value associated with location (i, j, k) in the image. `src_inter` (str): Interleave with which to save the source file ''' self.file = file self.datum = tuple(datum) self.value = value self.src_inter = src_inter def setup(self): self.create_test_image_file() def create_test_image_file(self): import os import spectral img = spy.open_image(self.file) fname = os.path.join(testdir, 'memmap_test_%s.hdr' % self.src_inter) spy.envi.save_image(fname, img, dtype = img.dtype, interleave = self.src_inter, force=True) self.image = spy.open_image(fname) def test_spyfile_has_memmap(self): assert(self.image.using_memmap == True) def test_bip_memmap_read(self): self.create_test_image_file() (i, j, k) = self.datum mm = self.image.open_memmap(interleave='bip') assert_almost_equal(mm[i, j, k], self.value) def test_bil_memmap_read(self): self.create_test_image_file() (i, j, k) = self.datum mm = self.image.open_memmap(interleave='bil') assert_almost_equal(mm[i, k, j], self.value) def test_bsq_memmap_read(self): self.create_test_image_file() (i, j, k) = self.datum mm = self.image.open_memmap(interleave='bsq') assert_almost_equal(mm[k, i, j], self.value) def test_bip_memmap_write(self): from spectral import open_image self.create_test_image_file() (i, j, k) = self.datum mm = self.image.open_memmap(interleave='bip', writable=True) mm[i, j, k] = 2 * self.value mm.flush() assert_almost_equal(self.image.open_memmap()[i, j, k], 2 * self.value) def test_bil_memmap_write(self): from spectral import open_image self.create_test_image_file() (i, j, k) = self.datum mm = self.image.open_memmap(interleave='bil', writable=True) mm[i, k, j] = 3 * self.value mm.flush() assert_almost_equal(self.image.open_memmap()[i, j, k], 3 * self.value) def test_bsq_memmap_write(self): from spectral import open_image self.create_test_image_file() (i, j, k) = self.datum mm = self.image.open_memmap(interleave='bsq', writable=True) mm[k, i, j] = 3 * self.value mm.flush() assert_almost_equal(self.image.open_memmap()[i, j, k], 3 * self.value) class SpyFileMemmapTestSuite(object): def __init__(self, filename, datum, value): ''' Arguments: `filename` (str): Name of the image file to be tested. `datum` (3-tuple of ints): (i, j, k) are the row, column and band of the datum to be tested. 'i' and 'j' should be at least 10 pixels away from the edge of the associated image and `k` should have at least 10 bands above and below it in the image. `value` (int or float): The scalar value associated with location (i, j, k) in the image. 
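        Example (illustrative, mirroring the `run` function at the bottom of
        this module):

            suite = SpyFileMemmapTestSuite('92AV3C.lan', (30, 40, 50), 5420.0)
            suite.run()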
''' self.filename = filename self.datum = datum self.value = value def run(self): import os import itertools import spectral print('\n' + '-' * 72) print('Running memmap tests.') print('-' * 72) if not os.path.isdir(testdir): os.mkdir(testdir) interleaves = ('bil', 'bip', 'bsq') for inter in interleaves: print('Testing memmaps with %s image file.' % inter.upper()) test = SpyFileMemmapTest(self.filename, self.datum, self.value, inter) test.run() def run(): suite = SpyFileMemmapTestSuite('92AV3C.lan', (30, 40, 50), 5420.0) suite.run() if __name__ == '__main__': from spectral.tests.run import parse_args, reset_stats, print_summary parse_args() reset_stats() run() print_summary() spectral-0.22.4/spectral/tests/run.py000066400000000000000000000025321412674721200175610ustar00rootroot00000000000000''' Runs a set of unit tests for the spectral package. To run all unit tests, type the following from the system command line: # python -m spectral.tests.run ''' from __future__ import absolute_import, division, print_function, unicode_literals import logging from optparse import OptionParser import spectral.tests def parse_args(): parser = OptionParser() parser.add_option('-c', '--continue', dest='continue_tests', action='store_true', default=False, help='Continue with remaining tests after a ' 'failed test.') (options, args) = parser.parse_args() spectral.tests.abort_on_fail = not options.continue_tests def reset_stats(): spectral.tests._num_tests_run = 0 spectral.tests._num_tests_failed = 0 def print_summary(): if spectral.tests._num_tests_failed > 0: msg = '%d of %d tests FAILED.' % (spectral.tests._num_tests_failed, spectral.tests._num_tests_run) else: msg = 'All %d tests PASSED!' % spectral.tests._num_tests_run print('\n' + '-' * 72) print(msg) print('-' * 72) if __name__ == '__main__': logging.getLogger('spectral').setLevel(logging.ERROR) parse_args() reset_stats() for test in spectral.tests.all_tests: test.run() print_summary() spectral-0.22.4/spectral/tests/spatial.py000066400000000000000000000155201412674721200204130ustar00rootroot00000000000000''' Runs unit tests for various SPy spatial functions. 
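Most of these tests exercise `map_window`, which applies a user-supplied
function over a sliding spatial window of an image. A sketch of the call
pattern being verified (illustrative only; assumes the sample file is
available):

    import numpy as np
    import spectral as spy
    from spectral.algorithms.spatial import map_window

    X = spy.open_image('92AV3C.lan').load()
    f = lambda W, ij: np.mean(W.reshape((-1, W.shape[-1])), axis=0)
    y = map_window(f, X, (3, 5), (10, 50), (20, 40))    # 3x5 window means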
To run the unit tests, type the following from the system command line: # python -m spectral.tests.spatial ''' from __future__ import absolute_import, division, print_function, unicode_literals import numpy as np from numpy.testing import assert_allclose from warnings import warn import spectral as spy from spectral.algorithms.spatial import (get_window_bounds, get_window_bounds_clipped, map_class_ids, map_classes, map_window) from spectral.tests.spytest import SpyTest class SpatialWindowTest(SpyTest): '''Tests various spatial functions.''' def setup(self): self.data = spy.open_image('92AV3C.lan').load() def test_get_window_bounds(self): assert(get_window_bounds(90, 90, 3, 7, 30, 40) == (29, 32, 37, 44)) def test_get_window_bounds_border(self): assert(get_window_bounds(90, 90, 3, 7, 0, 2) == (0, 3, 0, 7)) def test_get_window_bounds_clipped(self): assert(get_window_bounds_clipped(90, 90, 3, 7, 30, 40) \ == (29, 32, 37, 44)) def test_get_window_bounds_clipped_border(self): assert(get_window_bounds_clipped(90, 90, 3, 7, 0, 2) == (0, 2, 0, 6)) def test_map_window(self): '''Test computing spectra average over local window.''' f = lambda X, ij: np.mean(X.reshape((-1, X.shape[-1])), axis=0) X = self.data y = map_window(f, X, (3, 5), (10, 50), (20, 40)) t = np.mean(X[9:12, 18:23].reshape((-1, X.shape[-1])), axis=0) assert_allclose(y[0, 0], t) def test_map_window_clipped(self): '''Test spatial averaging near border with clipped window.''' f = lambda X, ij: np.mean(X.reshape((-1, X.shape[-1])), axis=0) X = self.data y = map_window(f, X, (3, 5), (100, None), (100, None), border='clip') t = np.mean(X[-2:, -3:].reshape((-1, X.shape[-1])), axis=0) assert_allclose(y[-1, -1], t) def test_map_window_shifted(self): '''Test spatial averaging near border with shifted window.''' f = lambda X, ij: np.mean(X.reshape((-1, X.shape[-1])), axis=0) X = self.data y = map_window(f, X, (3, 5), (100, None), (100, None), border='shift') t = np.mean(X[-3:, -5:].reshape((-1, X.shape[-1])), axis=0) assert_allclose(y[-1, -1], t) def test_map_window_stepped(self): '''Test spatial averaging with non-unity row/column step sizes.''' f = lambda X, ij: np.mean(X.reshape((-1, X.shape[-1])), axis=0) X = self.data y = map_window(f, X, (3, 5), (30, 60, 3), (70, 100, 4), border='shift') t = np.mean(X[32:35, 72:77].reshape((-1, X.shape[-1])), axis=0) assert_allclose(y[1, 1], t) class MapClassesTest(SpyTest): '''Test mapping of class indices between classification images.''' def setup(self): self.gt = spy.open_image('92AV3GT.GIS').read_band(0) def test_map_class_ids_identity(self): '''Mapping a class image back to itself should yield identity map.''' gt = np.array(self.gt) d = map_class_ids(gt, gt) for i in set(gt.ravel()): assert(i in d) for (i, j) in d.items(): assert(j == i) def test_map_class_ids_identity_unlabeled(self): '''Mapping a class image back to itself with an unlabeled class.''' gt = np.array(self.gt) d = map_class_ids(gt, gt, unlabeled=0) for i in set(gt.ravel()): assert(i in d) for (i, j) in d.items(): assert(j == i) def test_map_class_ids_identity_multiple_unlabeled(self): '''Mapping a class image back to itself with unlabeled classes.''' gt = np.array(self.gt) d = map_class_ids(gt, gt, unlabeled=[2, 4]) for i in set(gt.ravel()): assert(i in d) for (i, j) in d.items(): assert(j == i) def test_map_class_ids_isomorphic(self): '''Test map_class_ids with isomorphic classes.''' gt = np.array(self.gt) gt2 = gt + 1 d = map_class_ids(gt, gt2) for (i, j) in d.items(): assert(j == i + 1) def 
test_map_class_ids_isomorphic_background(self): '''Test map_class_ids with isomorphic classes and background arg.''' gt = np.array(self.gt) gt2 = gt + 1 d = map_class_ids(gt, gt2, unlabeled=0) assert(d[0] == 0) d.pop(0) for (i, j) in d.items(): assert(j == i + 1) def test_map_class_ids_src_gt_dest(self): '''Test map_class_ids with more classes in source image.''' gt = np.array(self.gt) (i, j) = (100, 30) old_label = gt[i, j] new_label = max(set(gt.ravel())) + 10 gt2 = np.array(gt) gt2[i, j] = new_label d = map_class_ids(gt2, gt) # There are enough pixels for each class that a new single-pixel class # should not be mapped to one of the existing classes. assert(d[new_label] not in gt) d.pop(new_label) for (i, j) in d.items(): assert(j == i) def test_map_class_ids_dest_gt_src(self): '''Test map_class_ids with more classes in dest image.''' gt = np.array(self.gt) (i, j) = (100, 30) old_label = gt[i, j] new_label = max(set(gt.ravel())) + 10 gt2 = np.array(gt) gt2[i, j] = new_label d = map_class_ids(gt, gt2) for (i, j) in d.items(): assert(j == i) def test_map_classes_isomorphic(self): '''map_classes should map isomorphic class image back to original.''' gt = np.array(self.gt) gt2 = gt + 1 d = map_class_ids(gt2, gt) result = map_classes(gt2, d) assert(np.alltrue(result == gt)) def test_map_fails_allow_unmapped_false(self): '''map_classes should raise ValueError if image has unmapped value.''' gt = np.array(self.gt) gt2 = gt + 1 d = map_class_ids(gt2, gt) d.pop(1) try: result = map_classes(gt2, d) except ValueError: pass else: assert(False) def test_map_allow_unmapped_true(self): '''map_classes should raise ValueError if image has unmapped value.''' gt = np.array(self.gt) gt2 = gt + 1 d = map_class_ids(gt2, gt) d.pop(1) result = map_classes(gt2, d, allow_unmapped=True) assert(np.alltrue(result[gt2 == 1] == 1)) def run(): print('\n' + '-' * 72) print('Running spatial tests.') print('-' * 72) for T in [SpatialWindowTest, MapClassesTest]: T().run() if __name__ == '__main__': from spectral.tests.run import parse_args, reset_stats, print_summary parse_args() reset_stats() run() print_summary() spectral-0.22.4/spectral/tests/spyfile.py000066400000000000000000000306001412674721200204250ustar00rootroot00000000000000''' Runs unit tests of spectral file I/O functions. The unit tests in this module assume the example file "92AV3C.lan" is in the spectral data path. After the file is opened it is saved in various formats (different combinations of byte order, interleave, and data type) and for each file written, the new file is opened and known data values are read and checked to verify they are read properly. To run the unit tests, type the following from the system command line: # python -m spectral.tests.spyfile ''' from __future__ import division, print_function, unicode_literals import itertools import numpy as np import os import spectral as spy from spectral.io.spyfile import find_file_path, FileNotFoundError, SpyFile from spectral.tests import testdir from spectral.tests.spytest import SpyTest def assert_almost_equal(a, b, **kwargs): if not np.allclose(a, b, **kwargs): raise Exception('NOPE') class SpyFileTest(SpyTest): '''Tests that SpyFile methods read data correctly from files.''' def __init__(self, file, datum, value): ''' Arguments: `file` (str or `SpyFile`): The SpyFile to be tested. This can be either the name of the file or a SpyFile object that has already been opened. `datum` (3-tuple of ints): (i, j, k) are the row, column and band of the datum to be tested. 
'i' and 'j' should be at least 10 pixels away from the edge of the associated image and `k` should have at least 10 bands above and below it in the image. `value` (int or float): The scalar value associated with location (i, j, k) in the image. ''' self.file = file self.datum = datum self.value = value def setup(self): if isinstance(self.file, SpyFile): self.image = self.file else: self.image = spy.open_image(self.file) def test_read_datum(self): assert_almost_equal(self.image.read_datum(*self.datum, use_memmap=True), self.value) assert_almost_equal(self.image.read_datum(*self.datum, use_memmap=False), self.value) def test_read_pixel(self): (i, j, k) = self.datum assert_almost_equal(self.image.read_pixel(i, j, use_memmap=True)[k], self.value) assert_almost_equal(self.image.read_pixel(i, j, use_memmap=False)[k], self.value) def test_read_band(self): (i, j, k) = self.datum assert_almost_equal(self.image.read_band(k, use_memmap=True)[i, j], self.value) assert_almost_equal(self.image.read_band(k, use_memmap=False)[i, j], self.value) def test_read_bands(self): (i, j, k) = self.datum bands = (k - 5, k - 2, k, k + 1) assert_almost_equal(self.image.read_bands(bands, use_memmap=True)[i, j, 2], self.value) assert_almost_equal(self.image.read_bands(bands, use_memmap=False)[i, j, 2], self.value) def test_read_bands_nonascending(self): (i, j, k) = self.datum bands = (k - 2, k + 1, k, k - 5) assert_almost_equal(self.image.read_bands(bands, use_memmap=True)[i, j, 2], self.value) assert_almost_equal(self.image.read_bands(bands, use_memmap=False)[i, j, 2], self.value) def test_read_bands_duplicates(self): (i, j, k) = self.datum bands = (k - 5, k - 5, k, k -5) assert_almost_equal(self.image.read_bands(bands, use_memmap=True)[i, j, 2], self.value) assert_almost_equal(self.image.read_bands(bands, use_memmap=False)[i, j, 2], self.value) def test_read_subregion(self): (i, j, k) = self.datum region = self.image.read_subregion((i - 5, i + 9), (j - 3, j + 4), use_memmap=True) assert_almost_equal(region[5, 3, k], self.value) region = self.image.read_subregion((i - 5, i + 9), (j - 3, j + 4), use_memmap=False) assert_almost_equal(region[5, 3, k], self.value) def test_read_subimage(self): (i, j, k) = self.datum subimage = self.image.read_subimage([0, 3, i, 5], [1, j, 4, 7], [3, 7, k], use_memmap=True) assert_almost_equal(subimage[2, 1, 2], self.value) subimage = self.image.read_subimage([0, 3, i, 5], [1, j, 4, 7], [3, 7, k], use_memmap=False) assert_almost_equal(subimage[2, 1, 2], self.value) subimage = self.image.read_subimage([0, 3, i, 5], [1, j, 4, 7], use_memmap=True) assert_almost_equal(subimage[2, 1, k], self.value) subimage = self.image.read_subimage([0, 3, i, 5], [1, j, 4, 7], use_memmap=False) assert_almost_equal(subimage[2, 1, k], self.value) def test_load(self): (i, j, k) = self.datum data = self.image.load() spyf = self.image load_assert = np.allclose load_assert(data[i, j, k], self.value) first_band = spyf[:, :, 0] load_assert(data[:, :, 0], first_band) # This is checking if different ImageArray and SpyFile indexing # results are the same shape, so we can't just reuse the already # loaded first band. load_assert(data[:, 0, 0], spyf[:, 0, 0]) load_assert(data[0, 0, 0], spyf[0, 0, 0]) load_assert(data[0, 0], spyf[0, 0]) load_assert(data[-1, -1, -1], spyf[-1, -1, -1]) load_assert(data[-1, -3:-1], spyf[-1, -3:-1]) load_assert(data[(6, 25)], spyf[(6, 25)]) # The following test would currently fail, because # SpyFile.__get_item__ treats [6,25] the same as (6,25). 
# load_assert(data[[6, 25]], # spyf[[6, 25]]) load_assert(data.read_band(0), spyf.read_band(0)) load_assert(data.read_bands([0, 1]), spyf.read_bands([0, 1])) load_assert(data.read_pixel(1, 2), spyf.read_pixel(1, 2)) load_assert(data.read_subregion([0, 3], [1, 2]), spyf.read_subregion([0, 3], [1, 2])) load_assert(data.read_subregion([0, 3], [1, 2], [0, 1]), spyf.read_subregion([0, 3], [1, 2], [0, 1])) load_assert(data.read_subimage([0, 2, 4], [6, 3]), spyf.read_subimage([0, 2, 4], [6, 3])) load_assert(data.read_subimage([0, 2], [6, 3], [0, 1]), spyf.read_subimage([0, 2], [6, 3], [0, 1])) load_assert(data.read_datum(1,2,8), spyf.read_datum(1,2,8)) ufunc_result = data + 1 assert isinstance(ufunc_result, np.ndarray) assert not isinstance(ufunc_result, type(data)) non_ufunc_result = data.diagonal() assert isinstance(non_ufunc_result, np.ndarray) assert not isinstance(non_ufunc_result, type(data)) def test_getitem_i_j_k(self): (i, j, k) = self.datum assert_almost_equal(self.image[i, j, k], self.value) def test_getitem_i_j(self): (i, j, k) = self.datum assert_almost_equal(self.image[i, j][k], self.value) def test_getitem_i_j_kslice(self): (i, j, k) = self.datum assert_almost_equal(self.image[i, j, k-2:k+3:2][0, 0, 1], self.value) def test_getitem_islice_jslice(self): (i, j, k) = self.datum assert_almost_equal(self.image[i-3:i+3, j-3:j+3][3, 3, k], self.value) def assert_same_shape_almost_equal(obj1, obj2, decimal=7, err_msg='', verbose=True): """ Assert that two objects are almost equal and have the same shape. numpy.testing.assert_almost_equal does test for shape, but considers arrays with one element and a scalar to be the same. """ # Types might be different since ImageArray stores things as # floats by default. if np.isscalar(obj1): assert np.isscalar(obj2), err_msg else: assert obj1.shape == obj2.shape, err_msg assert_almost_equal(obj1, obj2, decimal=decimal, err_msg=err_msg, verbose=verbose) class SpyFileTestSuite(object): '''Tests reading by byte orders, data types, and interleaves. For a specified image file name, the test suite will verify proper reading of data for various combinations of data type, interleave (BIL, BIP, BSQ), and byte order (little- and big-endian). A new file is created for each combination of parameters for testing. ''' def __init__(self, filename, datum, value, **kwargs): ''' Arguments: `filename` (str): Name of the image file to be tested. `datum` (3-tuple of ints): (i, j, k) are the row, column and band of the datum to be tested. 'i' and 'j' should be at least 10 pixels away from the edge of the associated image and `k` should have at least 10 bands above and below it in the image. `value` (int or float): The scalar value associated with location (i, j, k) in the image. Keyword Arguments: `dtypes` (tuple of numpy dtypes): The file will be tested for all of the dtypes given. If not specified, only float32 an float64 will be tested. 
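        Example (illustrative, mirroring the `run` function at the bottom of
        this module):

            suite = SpyFileTestSuite('92AV3C.lan', (99, 99, 99), 2057.0,
                                     dtypes=('i2', 'i4', 'f4', 'f8'))
            suite.run()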
''' self.filename = filename self.datum = datum self.value = value self.dtypes = kwargs.get('dtypes', ('f4', 'f8')) self.dtypes = [np.dtype(d).name for d in self.dtypes] def run(self): print('\n' + '-' * 72) print('Running SpyFile read tests.') print('-' * 72) if not os.path.isdir(testdir): os.mkdir(testdir) image = spy.open_image(self.filename) basename = os.path.join(testdir, os.path.splitext(self.filename)[0]) interleaves = ('bil', 'bip', 'bsq') ends = ('big', 'little') cases = itertools.product(interleaves, self.dtypes, ends) for (inter, dtype, endian) in cases: fname = '%s_%s_%s_%s.hdr' % (basename, inter, dtype, endian) spy.envi.save_image(fname, image, interleave=inter, dtype=dtype, byteorder=endian) msg = 'Running SpyFile read tests on %s %s %s-endian file ' \ % (inter.upper(), np.dtype(dtype).name, endian) testimg = spy.open_image(fname) if testimg.using_memmap is True: print('\n' + '-' * 72) print(msg + 'using memmap...') print('-' * 72) test = SpyFileTest(testimg, self.datum, self.value) test.run() print('\n' + '-' * 72) print(msg + 'without memmap...') print('-' * 72) testimg._disable_memmap() test = SpyFileTest(testimg, self.datum, self.value) test.run() else: print('\n' + '-' * 72) print(msg + 'without memmap...') print('-' * 72) test = SpyFileTest(testimg, self.datum, self.value) test.run() def run(): tests = [('92AV3C.lan', (99, 99, 99), 2057.0)] # tests = [('92AV3C.lan', (99, 99, 99), 2057.0), # ('f970619t01p02_r02_sc04.a.rfl', (99, 99, 99), 0.2311), # ('cup95eff.int.hdr', (99, 99, 33), 0.1842)] for (fname, datum, value) in tests: try: check = find_file_path(fname) suite = SpyFileTestSuite(fname, datum, value, dtypes=('i2', 'i4', 'f4', 'f8')) suite.run() except FileNotFoundError: print('File "%s" not found. Skipping.' % fname) if __name__ == '__main__': run() spectral-0.22.4/spectral/tests/spymath.py000066400000000000000000000072111412674721200204410ustar00rootroot00000000000000''' Runs unit tests for various SPy math functions. 
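These tests cover `matrix_sqrt` and the whitening transform exposed through
`spy.calc_stats`. A brief sketch of the calls being verified (illustrative
only; assumes the sample file is available):

    import numpy as np
    import spectral as spy
    from spectral.algorithms.spymath import matrix_sqrt

    X = np.array([[2., 1.], [1., 2.]])
    S = matrix_sqrt(X)                  # S.dot(S) is approximately X

    data = spy.open_image('92AV3C.lan').load()
    stats = spy.calc_stats(data)
    wdata = stats.get_whitening_transform()(data)   # ~identity covariance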
To run the unit tests, type the following from the system command line: # python -m spectral.tests.spymath ''' from __future__ import absolute_import, division, print_function, unicode_literals import numpy as np from numpy.testing import assert_allclose import spectral as spy from spectral.algorithms.spymath import matrix_sqrt from spectral.tests.spytest import SpyTest class SpyMathTest(SpyTest): '''Tests various math functions.''' def setup(self): self.data = spy.open_image('92AV3C.lan').open_memmap() self.C = spy.calc_stats(self.data).cov self.X = np.array([[2., 1.],[1., 2.]]) def test_matrix_sqrt(self): S = matrix_sqrt(self.X) assert_allclose(S.dot(S), self.X) def test_matrix_sqrt_inv(self): S = matrix_sqrt(self.X, inverse=True) assert_allclose(S.dot(S), np.linalg.inv(self.X)) def test_matrix_sqrt_sym(self): S = matrix_sqrt(self.C, symmetric=True) assert_allclose(S.dot(S), self.C, atol=1e-8) def test_matrix_sqrt_sym_inv(self): S = matrix_sqrt(self.C, symmetric=True, inverse=True) assert_allclose(S.dot(S), np.linalg.inv(self.C), atol=1e-8) def test_matrix_sqrt_eigs(self): stats = spy.calc_stats(self.data) (evals, evecs) = np.linalg.eig(stats.cov) S = matrix_sqrt(eigs=(evals, evecs)) assert_allclose(S.dot(S), self.C, atol=1e-8) def test_stats_property_sqrt_cov(self): stats = spy.calc_stats(self.data) s = stats.sqrt_cov.dot(stats.sqrt_cov) assert_allclose(s, stats.cov, atol=1e-8) def test_stats_property_sqrt_inv_cov(self): stats = spy.calc_stats(self.data) s = stats.sqrt_inv_cov.dot(stats.sqrt_inv_cov) assert_allclose(s, stats.inv_cov, atol=1e-8) def test_whiten_data(self): '''Test that whitening transform produce unit diagonal covariance.''' stats = spy.calc_stats(self.data) wdata = stats.get_whitening_transform()(self.data) wstats = spy.calc_stats(wdata) assert_allclose(wstats.cov, np.eye(wstats.cov.shape[0]), atol=1e-8) class PCATest(SpyTest): '''Tests Principal Components transformation.''' def setup(self): self.data = spy.open_image('92AV3C.lan').open_memmap() self.pc = spy.principal_components(self.data) def test_evals_sorted(self): '''Eigenvalues should be sorted in descending order.''' assert(np.alltrue(np.diff(self.pc.eigenvalues) <= 0)) def test_evecs_orthonormal(self): '''Eigenvectors should be orthonormal.''' evecs = self.pc.eigenvectors assert(np.allclose(evecs.T.dot(evecs), np.eye(evecs.shape[0]))) class LDATest(SpyTest): '''Tests various math functions.''' def setup(self): self.data = spy.open_image('92AV3C.lan').open_memmap() self.classes = spy.open_image('92AV3GT.GIS').read_band(0) def test_lda_covw_whitened(self): '''cov_w should be whitened in the transformed space.''' classes = spy.create_training_classes(self.data, self.classes) fld = spy.linear_discriminant(classes) xdata = fld.transform(self.data) classes.transform(fld.transform) fld2 = spy.linear_discriminant(classes) assert_allclose(np.eye(fld2.cov_w.shape[0]), fld2.cov_w, atol=1e-8) def run(): print('\n' + '-' * 72) print('Running math tests.') print('-' * 72) for T in [SpyMathTest, PCATest, LDATest]: T().run() if __name__ == '__main__': from spectral.tests.run import parse_args, reset_stats, print_summary parse_args() reset_stats() run() print_summary() spectral-0.22.4/spectral/tests/spytest.py000066400000000000000000000045631412674721200204760ustar00rootroot00000000000000''' Base class for all tests. ''' from __future__ import absolute_import, division, print_function, unicode_literals import collections import sys class SpyTest(object): '''Base class for test cases. 
Test classes are created by sub-classing SpyTest and defining methods whose names start with "test_". ''' def setup(self): '''Method to be run before derived class test methods are called.''' pass def finish(self): '''Method run after all test methods have run.''' pass def run(self): '''Runs all "test_*" methods in a derived class. Before running subclass test_ methods, the `startup` method will be called. After all test_ methods have been run, the `finish` method is called. ''' import spectral.tests as tests from spectral.tests import abort_on_fail self.setup() class NullStdOut(object): def write(*args, **kwargs): pass def flush(self): pass null = NullStdOut() methods = [getattr(self, s) for s in sorted(dir(self)) if s.startswith('test_')] methods = [m for m in methods if isinstance(m, collections.Callable)] stdout = sys.stdout for method in methods: print(format('Testing ' + method.__name__.split('_', 1)[-1], '.<60'), end=' ') tests._num_tests_run += 1 try: sys.stdout = null method() stdout.write('OK\n') except AssertionError: stdout.write('FAILED\n') tests._num_tests_failed += 1 if tests.abort_on_fail: raise finally: sys.stdout = stdout self.finish() # The following test method is now deprecated and should no longer be used. def test_method(method): '''Decorator function for unit tests.''' import spectral.tests as tests def meth(self): print(format('Testing ' + method.__name__.split('_', 1)[-1], '.<40'), end=' ') try: method(self) print('OK') tests._num_tests_run += 1 except AssertionError: print('FAILED') tests._num_tests_failed += 1 if tests.abort_on_fail: raise return meth spectral-0.22.4/spectral/tests/test_template.txt000066400000000000000000000013761412674721200220230ustar00rootroot00000000000000''' Runs unit tests for XXX. To run the unit tests, type the following from the system command line: # python -m spectral.tests.XXX ''' from __future__ import division, print_function, unicode_literals import numpy as np from numpy.testing import assert_allclose from spectral.tests.spytest import SpyTest class FooTest(SpyTest): '''Tests various math functions.''' def setup(self): pass def test_foo(self): assert_allclose(1, 1) def run(): print('\n' + '-' * 72) print('Running XXX tests.') print('-' * 72) test = FooTest() test.run() if __name__ == '__main__': from spectral.tests.run import parse_args, reset_stats, print_summary parse_args() reset_stats() run() print_summary() spectral-0.22.4/spectral/tests/transforms.py000066400000000000000000000125231412674721200211540ustar00rootroot00000000000000''' Runs unit tests for linear transforms of spectral data & data files. The unit tests in this module assume the example file "92AV3C.lan" is in the spectral data path. After the file is opened, unit tests verify that LinearTransform objects created with SpyFile and numpy.ndarray objects yield the correct values for known image data values. To run the unit tests, type the following from the system command line: # python -m spectral.tests.transforms ''' from __future__ import division, print_function, unicode_literals import numpy as np from numpy.testing import assert_almost_equal import spectral as spy from spectral.algorithms.transforms import LinearTransform from spectral.io.spyfile import SpyFile from spectral.tests.spytest import SpyTest class LinearTransformTest(SpyTest): '''Tests that LinearTransform objects produce correct values.''' def __init__(self, file, datum, value): ''' Arguments: `file` (str or `SpyFile`): The SpyFile to be tested. 
This can be either the name of the file or a SpyFile object that has already been opened. `datum` (3-tuple of ints): (i, j, k) are the row, column and band of the datum to be tested. 'i' and 'j' should be at least 10 pixels away from the edge of the associated image and `k` should have at least 10 bands above and below it in the image. `value` (int or float): The scalar value associated with location (i, j, k) in the image. ''' self.file = file self.datum = datum self.value = value def setup(self): if isinstance(self.file, SpyFile): self.image = self.file elif isinstance(self.file, np.ndarray): self.image = self.file else: self.image = spy.open_image(self.file) self.scalar = 10. self.matrix = self.scalar * np.identity(self.image.shape[2], dtype='f8') self.pre = 37. self.post = 51. def test_scalar_multiply(self): (i, j, k) = self.datum transform = LinearTransform(self.scalar) result = transform(self.image[i, j])[k] assert_almost_equal(result, self.scalar * self.value) def test_pre_scalar_multiply(self): (i, j, k) = self.datum transform = LinearTransform(self.scalar, pre=self.pre) result = transform(self.image[i, j])[k] assert_almost_equal(result, self.scalar * (self.pre + self.value)) def test_scalar_multiply_post(self): (i, j, k) = self.datum transform = LinearTransform(self.scalar, post=self.post) result = transform(self.image[i, j])[k] assert_almost_equal(result, self.scalar * self.value + self.post) def test_pre_scalar_multiply_post(self): (i, j, k) = self.datum transform = LinearTransform(self.scalar, pre=self.pre, post=self.post) result = transform(self.image[i, j])[k] assert_almost_equal(result, self.scalar * (self.pre + self.value) + self.post) def test_matrix_multiply(self): (i, j, k) = self.datum transform = LinearTransform(self.matrix) result = transform(self.image[i, j])[k] assert_almost_equal(result, self.scalar * self.value) def test_pre_matrix_multiply(self): (i, j, k) = self.datum transform = LinearTransform(self.matrix, pre=self.pre) result = transform(self.image[i, j])[k] assert_almost_equal(result, self.scalar * (self.pre + self.value)) def test_matrix_multiply_post(self): (i, j, k) = self.datum transform = LinearTransform(self.matrix, post=self.post) result = transform(self.image[i, j])[k] assert_almost_equal(result, self.scalar * self.value + self.post) def test_pre_matrix_multiply_post(self): (i, j, k) = self.datum transform = LinearTransform(self.matrix, pre=self.pre, post=self.post) result = transform(self.image[i, j])[k] assert_almost_equal(result, self.scalar * (self.pre + self.value) + self.post) def run(): (fname, datum, value) = ('92AV3C.lan', (99, 99, 99), 2057.0) image = spy.open_image(fname) print('\n' + '-' * 72) print('Running LinearTransform tests on SpyFile object.') print('-' * 72) test = LinearTransformTest(image, datum, value) test.run() data = image.load() print('\n' + '-' * 72) print('Running LinearTransform tests on ImageArray object.') print('-' * 72) test = LinearTransformTest(data, datum, value) test.run() image.scale_factor = 10000.0 print('\n' + '-' * 72) print('Running LinearTransform tests on SpyFile object with scale factor.') print('-' * 72) test = LinearTransformTest(image, datum, value / 10000.0) test.run() if __name__ == '__main__': from spectral.tests.run import parse_args, reset_stats, print_summary parse_args() reset_stats() run() print_summary() 
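# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): the LinearTransform
# behavior verified above, shown directly on a small array. The function name
# is hypothetical and is never called by the test suite.
def _example_linear_transform():
    import numpy as np
    from spectral.algorithms.transforms import LinearTransform

    x = np.arange(5, dtype='f8')
    lt = LinearTransform(10., pre=37., post=51.)
    return lt(x)                        # equals 10. * (x + 37.) + 51.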
spectral-0.22.4/spectral/utilities/000077500000000000000000000000001412674721200172525ustar00rootroot00000000000000spectral-0.22.4/spectral/utilities/__init__.py000066400000000000000000000001261412674721200213620ustar00rootroot00000000000000from __future__ import absolute_import, division, print_function, unicode_literals spectral-0.22.4/spectral/utilities/errors.py000066400000000000000000000006311412674721200211400ustar00rootroot00000000000000from __future__ import absolute_import, division, print_function, unicode_literals import numpy as np class SpyException(Exception): '''Base class for spectral module-specific exceptions.''' pass class NaNValueWarning(UserWarning): pass class NaNValueError(ValueError): pass def has_nan(X): '''returns True if ndarray `X` contains a NaN value.''' return bool(np.isnan(np.min(X))) spectral-0.22.4/spectral/utilities/python23.py000066400000000000000000000017411412674721200213150ustar00rootroot00000000000000''' Functions for python 2/3 compatibility. ''' from __future__ import absolute_import, division, print_function, unicode_literals import sys IS_PYTHON3 = sys.version_info >= (3,) def typecode(t): '''Typecode handling for array module. Python 3 expects a unicode character, whereas python 2 expects a byte char. Arguments: `t` (typecode string): An input for array.array. Return value: The input formatted for the appropriate python version. ''' if IS_PYTHON3: return t else: return chr(ord(t)) if IS_PYTHON3: def is_string(s): return isinstance(s, (str, bytes)) else: def is_string(s): return isinstance(s, basestring) # array.tostring is deprecated in python3 if IS_PYTHON3: tobytes = lambda array: array.tobytes() frombytes = lambda array, src: array.frombytes(src) else: tobytes = lambda array: array.tostring() frombytes = lambda array, src: array.fromstring(src) spectral-0.22.4/spectral/utilities/status.py000066400000000000000000000042651412674721200211560ustar00rootroot00000000000000''' A class for display completion status for long-running iterative operations. ''' from __future__ import division, print_function, unicode_literals import sys from .. import settings class StatusDisplay: ''' A class to sequentially display percentage completion of an iterative process on a single line. ''' def __init__(self): self._pretext = '' self._overwrite = False self._percent_fmt = '% 5.1f' self._text_len = 0 def display_percentage(self, text, percent=0.0, format='% 5.1f'): '''Called when initializing display of a process status.''' self._overwrite = True self._pretext = text self._percent_fmt = format text = self._pretext + self._percent_fmt % percent + '%' self._text_len = len(text) if hasattr(sys, 'ps1') and settings.show_progress: sys.stdout.write(text) sys.stdout.flush() def update_percentage(self, percent): '''Called whenever an update of the displayed status is desired.''' if not (hasattr(sys, 'ps1') and settings.show_progress): return text = self._pretext + self._percent_fmt % percent + '%' sys.stdout.write('\b' * self._text_len) self._text_len = len(text) sys.stdout.write(text) sys.stdout.flush() def end_percentage(self, text='done'): '''Prints a final status and resumes normal text display.''' if not (hasattr(sys, 'ps1') and settings.show_progress): return text = self._pretext + text sys.stdout.write('\b' * self._text_len) fmt = '%%-%ds\n' % self._text_len self._text_len = len(text) sys.stdout.write(fmt % text) sys.stdout.flush() self._overwrite = False def write(self, text): ''' Called to display text on a new line without interrupting progress display. 
        '''
        if not (hasattr(sys, 'ps1') and settings.show_progress):
            return
        if self._overwrite and text != '\n':
            sys.stdout.write('\n')
        sys.stdout.write(text)
        sys.stdout.flush()
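# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): typical use of
# StatusDisplay for a long-running loop. The function name is hypothetical.
def _example_status_display(n=100):
    status = StatusDisplay()
    status.display_percentage('Processing: ')
    for i in range(n):
        status.update_percentage(100. * i / n)
    status.end_percentage('done')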